ansible-playbook [core 2.17.5]
  config file = None
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/local/lib/python3.12/site-packages/ansible
  ansible collection location = /tmp/collections-SZh
  executable location = /usr/local/bin/ansible-playbook
  python version = 3.12.6 (main, Sep  9 2024, 00:00:00) [GCC 11.5.0 20240719 (Red Hat 11.5.0-2)] (/usr/bin/python3.12)
  jinja version = 3.1.4
  libyaml = True
No config file found; using defaults
running playbook inside collection fedora.linux_system_roles
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.profile_tasks to ansible.posix.profile_tasks
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.
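
The banner above is ansible-playbook's startup output at increased verbosity.
A roughly equivalent local invocation, assuming the collection is unpacked
under /tmp/collections-SZh as the paths in this log show, would be:

    ANSIBLE_COLLECTIONS_PATH=/tmp/collections-SZh \
        ansible-playbook -vv -i inventory \
        /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml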

PLAYBOOK: tests_stratis.yml ****************************************************
1 plays in /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml

PLAY [Test stratis pool management] ********************************************

TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:2
Saturday 02 November 2024  18:53:29 -0400 (0:00:00.013)       0:00:00.013 ***** 
[WARNING]: Platform linux on host managed-node2 is using the discovered Python
interpreter at /usr/bin/python3.9, but future installation of another Python
interpreter could change the meaning of that path. See
https://docs.ansible.com/ansible-core/2.17/reference_appendices/interpreter_discovery.html
for more information.
ok: [managed-node2]
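
The interpreter warning is informational: Ansible discovered
/usr/bin/python3.9 on the managed node and will keep using it for this run.
To make the choice explicit and silence the warning, the interpreter can be
pinned in inventory or host/group vars, for example:

    # host_vars/managed-node2.yml
    ansible_python_interpreter: /usr/bin/python3.9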

TASK [Run the role] ************************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:14
Saturday 02 November 2024  18:53:30 -0400 (0:00:01.503)       0:00:01.517 ***** 
included: fedora.linux_system_roles.storage for managed-node2

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Saturday 02 November 2024  18:53:30 -0400 (0:00:00.052)       0:00:01.570 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Saturday 02 November 2024  18:53:30 -0400 (0:00:00.037)       0:00:01.607 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}
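
This skip is expected: the role re-gathers facts only when facts it needs are
missing, and the initial Gathering Facts task already collected them. The
false_condition above corresponds to a gating task shaped roughly like the
sketch below (__storage_required_facts is the role's internal list of
required fact names):

    - name: Ensure ansible_facts used by role
      ansible.builtin.setup:
        gather_subset: min
      when: __storage_required_facts |
        difference(ansible_facts.keys() | list) | length > 0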

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Saturday 02 November 2024  18:53:30 -0400 (0:00:00.061)       0:00:01.668 ***** 
skipping: [managed-node2] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node2] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}
ok: [managed-node2] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}
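
CentOS_9.yml is loaded twice because the loop tries a series of increasingly
specific vars files and, on this host, two of the name templates render to
the same file: distribution plus major version and distribution plus full
version are both "CentOS_9". RedHat.yml and CentOS.yml are skipped because no
such files exist in the role's vars/ directory. A sketch of the pattern,
assuming the usual linux_system_roles layout:

    - name: Set platform/version specific variables
      ansible.builtin.include_vars: "{{ __vars_file }}"
      loop:
        - RedHat.yml
        - "{{ ansible_distribution }}.yml"
        - "{{ ansible_distribution }}_{{ ansible_distribution_major_version }}.yml"
        - "{{ ansible_distribution }}_{{ ansible_distribution_version }}.yml"
      vars:
        __vars_file: "{{ role_path }}/vars/{{ item }}"
      when: __vars_file is file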

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Saturday 02 November 2024  18:53:30 -0400 (0:00:00.071)       0:00:01.740 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Saturday 02 November 2024  18:53:31 -0400 (0:00:00.546)       0:00:02.286 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "__storage_is_ostree": false
    },
    "changed": false
}
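
Together, the two tasks above detect rpm-ostree systems by checking for the
marker file such systems create at boot, then cache the result as a fact.
Roughly:

    - name: Check if system is ostree
      ansible.builtin.stat:
        path: /run/ostree-booted
      register: __ostree_booted_stat

    - name: Set flag to indicate system is ostree
      ansible.builtin.set_fact:
        __storage_is_ostree: "{{ __ostree_booted_stat.stat.exists }}"

Here the file does not exist, so __storage_is_ostree is false and the role
proceeds with normal package installation.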

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Saturday 02 November 2024  18:53:31 -0400 (0:00:00.028)       0:00:02.315 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Saturday 02 November 2024  18:53:31 -0400 (0:00:00.026)       0:00:02.341 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Saturday 02 November 2024  18:53:31 -0400 (0:00:00.028)       0:00:02.369 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Saturday 02 November 2024  18:53:31 -0400 (0:00:00.077)       0:00:02.447 ***** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: kmod-kvdo libblockdev libblockdev-crypto libblockdev-dm libblockdev-lvm libblockdev-mdraid libblockdev-swap python3-blivet stratis-cli stratisd vdo xfsprogs
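
"Nothing to do" means every package in blivet_package_list (resolved from
CentOS_9.yml above, including the Stratis packages stratisd and stratis-cli)
was already installed, so the task reports ok rather than changed. The
install step is essentially a package-module call over that list:

    - name: Make sure blivet is available
      ansible.builtin.package:
        name: "{{ blivet_package_list }}"
        state: present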

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Saturday 02 November 2024  18:53:33 -0400 (0:00:01.541)       0:00:03.988 ***** 
ok: [managed-node2] => {
    "storage_pools": "VARIABLE IS NOT DEFINED!: 'storage_pools' is undefined"
}

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Saturday 02 November 2024  18:53:33 -0400 (0:00:00.032)       0:00:04.021 ***** 
ok: [managed-node2] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}
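
"VARIABLE IS NOT DEFINED!" is not an error: ansible.builtin.debug prints that
placeholder whenever the requested variable is undefined. This test defines
storage_pools and storage_volumes only in later steps, so both tasks, of the
form below, show the placeholder:

    - name: Show storage_pools
      ansible.builtin.debug:
        var: storage_pools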

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Saturday 02 November 2024  18:53:33 -0400 (0:00:00.028)       0:00:04.049 ***** 
ok: [managed-node2] => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [],
    "mounts": [],
    "packages": [],
    "pools": [],
    "volumes": []
}
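
This pass asks the role's blivet module which additional packages the
requested configuration would need, without changing any storage. With no
pools or volumes defined yet, every list comes back empty. A sketch of the
call, assuming packages_only is the module's compute-only switch:

    - name: Get required packages
      fedora.linux_system_roles.blivet:
        pools: "{{ _storage_pools_list }}"
        volumes: "{{ _storage_volumes_list }}"
        packages_only: true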

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Saturday 02 November 2024  18:53:33 -0400 (0:00:00.665)       0:00:04.714 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Check if the COPR support packages should be installed] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:2
Saturday 02 November 2024  18:53:33 -0400 (0:00:00.053)       0:00:04.768 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Make sure COPR support packages are present] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:13
Saturday 02 November 2024  18:53:33 -0400 (0:00:00.053)       0:00:04.821 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "install_copr | d(false) | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable COPRs] ************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:19
Saturday 02 November 2024  18:53:33 -0400 (0:00:00.057)       0:00:04.879 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}
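
All three COPR tasks are no-ops by default: enablement is gated on
install_copr (default false, per the false_condition above) and the list of
repositories to enable is empty on this platform ("No items in the list").
Opting in would only require setting the flag, plus a repository list, when
invoking the role:

    vars:
      install_copr: true    # default is false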

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Saturday 02 November 2024  18:53:34 -0400 (0:00:00.056)       0:00:04.936 ***** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: kpartx

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Saturday 02 November 2024  18:53:35 -0400 (0:00:01.394)       0:00:06.330 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "services": {
            "NetworkManager-dispatcher.service": {
                "name": "NetworkManager-dispatcher.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "NetworkManager-wait-online.service": {
                "name": "NetworkManager-wait-online.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "NetworkManager.service": {
                "name": "NetworkManager.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "auditd.service": {
                "name": "auditd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "auth-rpcgss-module.service": {
                "name": "auth-rpcgss-module.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "autofs.service": {
                "name": "autofs.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "autovt@.service": {
                "name": "autovt@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "alias"
            },
            "blivet.service": {
                "name": "blivet.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "blk-availability.service": {
                "name": "blk-availability.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "chrony-wait.service": {
                "name": "chrony-wait.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "chronyd-restricted.service": {
                "name": "chronyd-restricted.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "chronyd.service": {
                "name": "chronyd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "cloud-config.service": {
                "name": "cloud-config.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-final.service": {
                "name": "cloud-final.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-init-hotplugd.service": {
                "name": "cloud-init-hotplugd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "cloud-init-local.service": {
                "name": "cloud-init-local.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-init.service": {
                "name": "cloud-init.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "console-getty.service": {
                "name": "console-getty.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "container-getty@.service": {
                "name": "container-getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "cpupower.service": {
                "name": "cpupower.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "crond.service": {
                "name": "crond.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "dbus-broker.service": {
                "name": "dbus-broker.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "dbus-org.freedesktop.hostname1.service": {
                "name": "dbus-org.freedesktop.hostname1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus-org.freedesktop.locale1.service": {
                "name": "dbus-org.freedesktop.locale1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus-org.freedesktop.login1.service": {
                "name": "dbus-org.freedesktop.login1.service",
                "source": "systemd",
                "state": "active",
                "status": "alias"
            },
            "dbus-org.freedesktop.nm-dispatcher.service": {
                "name": "dbus-org.freedesktop.nm-dispatcher.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus-org.freedesktop.timedate1.service": {
                "name": "dbus-org.freedesktop.timedate1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus.service": {
                "name": "dbus.service",
                "source": "systemd",
                "state": "active",
                "status": "alias"
            },
            "debug-shell.service": {
                "name": "debug-shell.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "display-manager.service": {
                "name": "display-manager.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "dm-event.service": {
                "name": "dm-event.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dnf-makecache.service": {
                "name": "dnf-makecache.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dnf-system-upgrade-cleanup.service": {
                "name": "dnf-system-upgrade-cleanup.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "dnf-system-upgrade.service": {
                "name": "dnf-system-upgrade.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "dracut-cmdline.service": {
                "name": "dracut-cmdline.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-initqueue.service": {
                "name": "dracut-initqueue.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-mount.service": {
                "name": "dracut-mount.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-mount.service": {
                "name": "dracut-pre-mount.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-pivot.service": {
                "name": "dracut-pre-pivot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-trigger.service": {
                "name": "dracut-pre-trigger.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-udev.service": {
                "name": "dracut-pre-udev.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-shutdown-onfailure.service": {
                "name": "dracut-shutdown-onfailure.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-shutdown.service": {
                "name": "dracut-shutdown.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "emergency.service": {
                "name": "emergency.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "fcoe.service": {
                "name": "fcoe.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "firewalld.service": {
                "name": "firewalld.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "fstrim.service": {
                "name": "fstrim.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "getty@.service": {
                "name": "getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "enabled"
            },
            "getty@tty1.service": {
                "name": "getty@tty1.service",
                "source": "systemd",
                "state": "running",
                "status": "active"
            },
            "grub-boot-indeterminate.service": {
                "name": "grub-boot-indeterminate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "grub2-systemd-integration.service": {
                "name": "grub2-systemd-integration.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "gssproxy.service": {
                "name": "gssproxy.service",
                "source": "systemd",
                "state": "running",
                "status": "disabled"
            },
            "hv_kvp_daemon.service": {
                "name": "hv_kvp_daemon.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "initrd-cleanup.service": {
                "name": "initrd-cleanup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-parse-etc.service": {
                "name": "initrd-parse-etc.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-switch-root.service": {
                "name": "initrd-switch-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-udevadm-cleanup-db.service": {
                "name": "initrd-udevadm-cleanup-db.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "irqbalance.service": {
                "name": "irqbalance.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "iscsi-shutdown.service": {
                "name": "iscsi-shutdown.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "iscsi.service": {
                "name": "iscsi.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "iscsid.service": {
                "name": "iscsid.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "kdump.service": {
                "name": "kdump.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "kmod-static-nodes.service": {
                "name": "kmod-static-nodes.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "kvm_stat.service": {
                "name": "kvm_stat.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "ldconfig.service": {
                "name": "ldconfig.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "logrotate.service": {
                "name": "logrotate.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "lvm-devices-import.service": {
                "name": "lvm-devices-import.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "lvm2-activation-early.service": {
                "name": "lvm2-activation-early.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "lvm2-lvmpolld.service": {
                "name": "lvm2-lvmpolld.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "lvm2-monitor.service": {
                "name": "lvm2-monitor.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "man-db-cache-update.service": {
                "name": "man-db-cache-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "man-db-restart-cache-update.service": {
                "name": "man-db-restart-cache-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "mdadm-grow-continue@.service": {
                "name": "mdadm-grow-continue@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "mdadm-last-resort@.service": {
                "name": "mdadm-last-resort@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "mdcheck_continue.service": {
                "name": "mdcheck_continue.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "mdcheck_start.service": {
                "name": "mdcheck_start.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "mdmon@.service": {
                "name": "mdmon@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "mdmonitor-oneshot.service": {
                "name": "mdmonitor-oneshot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "mdmonitor.service": {
                "name": "mdmonitor.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "microcode.service": {
                "name": "microcode.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "modprobe@.service": {
                "name": "modprobe@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "modprobe@configfs.service": {
                "name": "modprobe@configfs.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@drm.service": {
                "name": "modprobe@drm.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@efi_pstore.service": {
                "name": "modprobe@efi_pstore.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@fuse.service": {
                "name": "modprobe@fuse.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "multipathd.service": {
                "name": "multipathd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "ndctl-monitor.service": {
                "name": "ndctl-monitor.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "network.service": {
                "name": "network.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "nfs-blkmap.service": {
                "name": "nfs-blkmap.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nfs-idmapd.service": {
                "name": "nfs-idmapd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfs-mountd.service": {
                "name": "nfs-mountd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfs-server.service": {
                "name": "nfs-server.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "nfs-utils.service": {
                "name": "nfs-utils.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfsdcld.service": {
                "name": "nfsdcld.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nftables.service": {
                "name": "nftables.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nis-domainname.service": {
                "name": "nis-domainname.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "nm-priv-helper.service": {
                "name": "nm-priv-helper.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "ntpd.service": {
                "name": "ntpd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "ntpdate.service": {
                "name": "ntpdate.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "oddjobd.service": {
                "name": "oddjobd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "pam_namespace.service": {
                "name": "pam_namespace.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "plymouth-quit-wait.service": {
                "name": "plymouth-quit-wait.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "plymouth-start.service": {
                "name": "plymouth-start.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "qemu-guest-agent.service": {
                "name": "qemu-guest-agent.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "quotaon.service": {
                "name": "quotaon.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "raid-check.service": {
                "name": "raid-check.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "rbdmap.service": {
                "name": "rbdmap.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "rc-local.service": {
                "name": "rc-local.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rdisc.service": {
                "name": "rdisc.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "rescue.service": {
                "name": "rescue.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "restraintd.service": {
                "name": "restraintd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rngd.service": {
                "name": "rngd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rpc-gssd.service": {
                "name": "rpc-gssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-statd-notify.service": {
                "name": "rpc-statd-notify.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-statd.service": {
                "name": "rpc-statd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-svcgssd.service": {
                "name": "rpc-svcgssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "rpcbind.service": {
                "name": "rpcbind.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rpmdb-rebuild.service": {
                "name": "rpmdb-rebuild.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "rsyslog.service": {
                "name": "rsyslog.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "selinux-autorelabel-mark.service": {
                "name": "selinux-autorelabel-mark.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "selinux-autorelabel.service": {
                "name": "selinux-autorelabel.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "selinux-check-proper-disable.service": {
                "name": "selinux-check-proper-disable.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "serial-getty@.service": {
                "name": "serial-getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "indirect"
            },
            "serial-getty@ttyS0.service": {
                "name": "serial-getty@ttyS0.service",
                "source": "systemd",
                "state": "running",
                "status": "active"
            },
            "snapd.seeded.service": {
                "name": "snapd.seeded.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "sntp.service": {
                "name": "sntp.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "sshd-keygen.service": {
                "name": "sshd-keygen.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "sshd-keygen@.service": {
                "name": "sshd-keygen@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "disabled"
            },
            "sshd-keygen@ecdsa.service": {
                "name": "sshd-keygen@ecdsa.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "sshd-keygen@ed25519.service": {
                "name": "sshd-keygen@ed25519.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "sshd-keygen@rsa.service": {
                "name": "sshd-keygen@rsa.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "sshd.service": {
                "name": "sshd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "sshd@.service": {
                "name": "sshd@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "sssd-autofs.service": {
                "name": "sssd-autofs.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-kcm.service": {
                "name": "sssd-kcm.service",
                "source": "systemd",
                "state": "stopped",
                "status": "indirect"
            },
            "sssd-nss.service": {
                "name": "sssd-nss.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-pac.service": {
                "name": "sssd-pac.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-pam.service": {
                "name": "sssd-pam.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-ssh.service": {
                "name": "sssd-ssh.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-sudo.service": {
                "name": "sssd-sudo.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd.service": {
                "name": "sssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "stratis-fstab-setup@.service": {
                "name": "stratis-fstab-setup@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "stratisd-min-postinitrd.service": {
                "name": "stratisd-min-postinitrd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "stratisd.service": {
                "name": "stratisd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "syslog.service": {
                "name": "syslog.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "system-update-cleanup.service": {
                "name": "system-update-cleanup.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-ask-password-console.service": {
                "name": "systemd-ask-password-console.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-ask-password-wall.service": {
                "name": "systemd-ask-password-wall.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-backlight@.service": {
                "name": "systemd-backlight@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-binfmt.service": {
                "name": "systemd-binfmt.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-bless-boot.service": {
                "name": "systemd-bless-boot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-boot-check-no-failures.service": {
                "name": "systemd-boot-check-no-failures.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-boot-random-seed.service": {
                "name": "systemd-boot-random-seed.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-boot-update.service": {
                "name": "systemd-boot-update.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-coredump@.service": {
                "name": "systemd-coredump@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-exit.service": {
                "name": "systemd-exit.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-firstboot.service": {
                "name": "systemd-firstboot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-fsck-root.service": {
                "name": "systemd-fsck-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-fsck@.service": {
                "name": "systemd-fsck@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-growfs-root.service": {
                "name": "systemd-growfs-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-growfs@.service": {
                "name": "systemd-growfs@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-halt.service": {
                "name": "systemd-halt.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-hibernate-resume@.service": {
                "name": "systemd-hibernate-resume@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-hibernate.service": {
                "name": "systemd-hibernate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-hostnamed.service": {
                "name": "systemd-hostnamed.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-hwdb-update.service": {
                "name": "systemd-hwdb-update.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hybrid-sleep.service": {
                "name": "systemd-hybrid-sleep.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-initctl.service": {
                "name": "systemd-initctl.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journal-catalog-update.service": {
                "name": "systemd-journal-catalog-update.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journal-flush.service": {
                "name": "systemd-journal-flush.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journald.service": {
                "name": "systemd-journald.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-journald@.service": {
                "name": "systemd-journald@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-kexec.service": {
                "name": "systemd-kexec.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-localed.service": {
                "name": "systemd-localed.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-logind.service": {
                "name": "systemd-logind.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-machine-id-commit.service": {
                "name": "systemd-machine-id-commit.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-modules-load.service": {
                "name": "systemd-modules-load.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-network-generator.service": {
                "name": "systemd-network-generator.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-networkd-wait-online.service": {
                "name": "systemd-networkd-wait-online.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "systemd-pcrfs-root.service": {
                "name": "systemd-pcrfs-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-pcrfs@.service": {
                "name": "systemd-pcrfs@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-pcrmachine.service": {
                "name": "systemd-pcrmachine.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-pcrphase-initrd.service": {
                "name": "systemd-pcrphase-initrd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-pcrphase-sysinit.service": {
                "name": "systemd-pcrphase-sysinit.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-pcrphase.service": {
                "name": "systemd-pcrphase.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-poweroff.service": {
                "name": "systemd-poweroff.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-pstore.service": {
                "name": "systemd-pstore.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-quotacheck.service": {
                "name": "systemd-quotacheck.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-random-seed.service": {
                "name": "systemd-random-seed.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-reboot.service": {
                "name": "systemd-reboot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-remount-fs.service": {
                "name": "systemd-remount-fs.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled-runtime"
            },
            "systemd-repart.service": {
                "name": "systemd-repart.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-rfkill.service": {
                "name": "systemd-rfkill.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-suspend-then-hibernate.service": {
                "name": "systemd-suspend-then-hibernate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-suspend.service": {
                "name": "systemd-suspend.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-sysctl.service": {
                "name": "systemd-sysctl.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-sysext.service": {
                "name": "systemd-sysext.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "systemd-sysupdate-reboot.service": {
                "name": "systemd-sysupdate-reboot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "systemd-sysupdate.service": {
                "name": "systemd-sysupdate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "systemd-sysusers.service": {
                "name": "systemd-sysusers.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-timedated.service": {
                "name": "systemd-timedated.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-timesyncd.service": {
                "name": "systemd-timesyncd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "systemd-tmpfiles-clean.service": {
                "name": "systemd-tmpfiles-clean.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup-dev.service": {
                "name": "systemd-tmpfiles-setup-dev.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup.service": {
                "name": "systemd-tmpfiles-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles.service": {
                "name": "systemd-tmpfiles.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "systemd-udev-settle.service": {
                "name": "systemd-udev-settle.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udev-trigger.service": {
                "name": "systemd-udev-trigger.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udevd.service": {
                "name": "systemd-udevd.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-update-done.service": {
                "name": "systemd-update-done.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-update-utmp-runlevel.service": {
                "name": "systemd-update-utmp-runlevel.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-update-utmp.service": {
                "name": "systemd-update-utmp.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-user-sessions.service": {
                "name": "systemd-user-sessions.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-vconsole-setup.service": {
                "name": "systemd-vconsole-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-volatile-root.service": {
                "name": "systemd-volatile-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "target.service": {
                "name": "target.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "targetclid.service": {
                "name": "targetclid.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "teamd@.service": {
                "name": "teamd@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "user-runtime-dir@.service": {
                "name": "user-runtime-dir@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "user-runtime-dir@0.service": {
                "name": "user-runtime-dir@0.service",
                "source": "systemd",
                "state": "stopped",
                "status": "active"
            },
            "user@.service": {
                "name": "user@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "user@0.service": {
                "name": "user@0.service",
                "source": "systemd",
                "state": "running",
                "status": "active"
            },
            "ypbind.service": {
                "name": "ypbind.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "yppasswdd.service": {
                "name": "yppasswdd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "ypserv.service": {
                "name": "ypserv.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "ypxfrd.service": {
                "name": "ypxfrd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            }
        }
    },
    "changed": false
}
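
For reference, the dump above is service-facts output: each entry carries the unit's name, source, state, and status. A minimal sketch of gathering and querying these facts (the debug task and the unit chosen are illustrative, not from the role source):

    - name: Ensure service facts are gathered
      ansible.builtin.service_facts:

    - name: Inspect one unit from ansible_facts.services (illustrative)
      ansible.builtin.debug:
        msg: "{{ ansible_facts.services['systemd-udevd.service'] }}"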

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Saturday 02 November 2024  18:53:37 -0400 (0:00:02.072)       0:00:08.402 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}
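
The empty list above is the expected result on a host with no active LUKS devices. A minimal sketch of deriving such a fact from the service facts gathered earlier (the match pattern is an assumption, not taken from the role source):

    - name: Set storage_cryptsetup_services
      ansible.builtin.set_fact:
        # Collect systemd-cryptsetup@... units from the service facts;
        # the pattern below is illustrative.
        storage_cryptsetup_services: "{{ ansible_facts.services.keys() | list
          | select('match', '^systemd-cryptsetup@') | list }}"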

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Saturday 02 November 2024  18:53:37 -0400 (0:00:00.054)       0:00:08.457 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}
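
The task is skipped because the list set above is empty. When it is not, per-unit masking looks like this sketch (the real task may differ in detail):

    - name: Mask the systemd cryptsetup services (illustrative)
      ansible.builtin.systemd:
        name: "{{ item }}"
        masked: true
      loop: "{{ storage_cryptsetup_services }}"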

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Saturday 02 November 2024  18:53:37 -0400 (0:00:00.016)       0:00:08.474 ***** 
ok: [managed-node2] => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [],
    "mounts": [],
    "packages": [],
    "pools": [],
    "volumes": []
}
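
This is the role's central step: with no storage_pools or storage_volumes requested yet, every output key (actions, mounts, packages, and so on) stays empty and nothing changes. A user-level invocation that would drive this step to create a Stratis pool might look like the sketch below (pool name, disk, and size are hypothetical):

    - name: Create a Stratis pool with one volume (illustrative)
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo                  # hypothetical pool name
            type: stratis
            disks: ["sdb"]             # hypothetical disk
            volumes:
              - name: test1
                size: 4g
                mount_point: /opt/test1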

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Saturday 02 November 2024  18:53:38 -0400 (0:00:00.532)       0:00:09.006 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}
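
The workaround only runs when the caller sets storage_udevadm_trigger truthy; by default it is skipped, as here. A sketch of the gated task (the exact udevadm arguments are an assumption):

    - name: Trigger udev to re-scan block devices (illustrative)
      ansible.builtin.command: udevadm trigger --subsystem-match=block
      when: storage_udevadm_trigger | d(false)
      changed_when: false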

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Saturday 02 November 2024  18:53:38 -0400 (0:00:00.051)       0:00:09.057 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730587978.0113251,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "2b06b132c9b1f74ec4dca585656a9f294c78ba1c",
        "ctime": 1730587977.4203188,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 415236291,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1730587977.4203188,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1366,
        "uid": 0,
        "version": "3857707169",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
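
The stat output above is produced by a task along these lines (the register name is hypothetical):

    - name: Check if /etc/fstab is present
      ansible.builtin.stat:
        path: /etc/fstab
      register: __storage_fstab_stat   # hypothetical name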

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Saturday 02 November 2024  18:53:38 -0400 (0:00:00.369)       0:00:09.426 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "blivet_output is changed",
    "skip_reason": "Conditional result was False"
}
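
The fingerprint is only written when the blivet step reported a change, which it did not here. A sketch of the idea, assuming a comment-line marker (both the marker text and the module choice are assumptions):

    - name: Add fingerprint comment to /etc/fstab (illustrative)
      ansible.builtin.lineinfile:
        path: /etc/fstab
        line: "# system_role:storage"   # assumed marker format
        insertbefore: BOF
      when: blivet_output is changed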

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Saturday 02 November 2024  18:53:38 -0400 (0:00:00.026)       0:00:09.453 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Saturday 02 November 2024  18:53:38 -0400 (0:00:00.017)       0:00:09.471 ***** 
ok: [managed-node2] => {
    "blivet_output": {
        "actions": [],
        "changed": false,
        "crypts": [],
        "failed": false,
        "leaves": [],
        "mounts": [],
        "packages": [],
        "pools": [],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Saturday 02 November 2024  18:53:38 -0400 (0:00:00.023)       0:00:09.494 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Saturday 02 November 2024  18:53:38 -0400 (0:00:00.022)       0:00:09.516 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Saturday 02 November 2024  18:53:38 -0400 (0:00:00.023)       0:00:09.540 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Saturday 02 November 2024  18:53:38 -0400 (0:00:00.039)       0:00:09.579 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "blivet_output['mounts']",
    "skip_reason": "Conditional result was False"
}
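
The reload is gated on blivet_output['mounts'] being non-empty, so an unchanged run skips it. The equivalent task is straightforward (a sketch):

    - name: Tell systemd to refresh its view of /etc/fstab
      ansible.builtin.systemd:
        daemon_reload: true
      when: blivet_output['mounts']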

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Saturday 02 November 2024  18:53:38 -0400 (0:00:00.019)       0:00:09.599 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Saturday 02 November 2024  18:53:38 -0400 (0:00:00.039)       0:00:09.638 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Saturday 02 November 2024  18:53:38 -0400 (0:00:00.039)       0:00:09.677 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "blivet_output['mounts']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Saturday 02 November 2024  18:53:38 -0400 (0:00:00.024)       0:00:09.702 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730587881.7952998,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1730587879.2202723,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 104857860,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1730587879.2212098,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "3381617552",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Saturday 02 November 2024  18:53:39 -0400 (0:00:00.380)       0:00:10.083 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}
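
With blivet_output.crypts empty, there is nothing to write. A sketch of per-entry crypttab maintenance using lineinfile as a stand-in (the field names on each crypts entry are assumptions):

    - name: Maintain /etc/crypttab entries (illustrative)
      ansible.builtin.lineinfile:
        path: /etc/crypttab
        regexp: '^{{ item.name }}\s'
        line: "{{ item.name }} {{ item.device }} {{ item.password | d('-') }}"
        mode: "0600"
      loop: "{{ blivet_output.crypts }}"   # field names are assumptions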

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Saturday 02 November 2024  18:53:39 -0400 (0:00:00.025)       0:00:10.108 ***** 
ok: [managed-node2]

TASK [Mark tasks to be skipped] ************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:18
Saturday 02 November 2024  18:53:40 -0400 (0:00:00.918)       0:00:11.027 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_skip_checks": [
            "blivet_available",
            "packages_installed",
            "service_facts"
        ]
    },
    "changed": false
}
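
Setting storage_skip_checks lets later invocations of the role in this play skip re-verifying blivet availability, package installation, and service facts, which keeps repeated runs fast. The fact shown above corresponds directly to:

    - name: Mark tasks to be skipped
      ansible.builtin.set_fact:
        storage_skip_checks:
          - blivet_available
          - packages_installed
          - service_facts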

TASK [Gather package facts] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:25
Saturday 02 November 2024  18:53:40 -0400 (0:00:00.046)       0:00:11.074 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "packages": {
            "NetworkManager": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "NetworkManager",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "1.51.2"
                }
            ],
            "NetworkManager-libnm": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "NetworkManager-libnm",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "1.51.2"
                }
            ],
            "NetworkManager-team": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "NetworkManager-team",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "1.51.2"
                }
            ],
            "NetworkManager-tui": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "NetworkManager-tui",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "1.51.2"
                }
            ],
            "acl": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "acl",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "2.3.1"
                }
            ],
            "alternatives": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "alternatives",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "1.24"
                }
            ],
            "aspell": [
                {
                    "arch": "x86_64",
                    "epoch": 12,
                    "name": "aspell",
                    "release": "8.el9",
                    "source": "rpm",
                    "version": "0.60.8"
                }
            ],
            "attr": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "attr",
                    "release": "3.el9",
                    "source": "rpm",
                    "version": "2.5.1"
                }
            ],
            "audit": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "audit",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "3.1.5"
                }
            ],
            "audit-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "audit-libs",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "3.1.5"
                }
            ],
            "authselect": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "authselect",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "1.2.6"
                }
            ],
            "authselect-compat": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "authselect-compat",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "1.2.6"
                }
            ],
            "authselect-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "authselect-libs",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "1.2.6"
                }
            ],
            "avahi-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "avahi-libs",
                    "release": "21.el9",
                    "source": "rpm",
                    "version": "0.8"
                }
            ],
            "basesystem": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "basesystem",
                    "release": "13.el9",
                    "source": "rpm",
                    "version": "11"
                }
            ],
            "bash": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "bash",
                    "release": "9.el9",
                    "source": "rpm",
                    "version": "5.1.8"
                }
            ],
            "bc": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "bc",
                    "release": "14.el9",
                    "source": "rpm",
                    "version": "1.07.1"
                }
            ],
            "beakerlib": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "beakerlib",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "1.29.3"
                }
            ],
            "beakerlib-redhat": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "beakerlib-redhat",
                    "release": "35.el9",
                    "source": "rpm",
                    "version": "1"
                }
            ],
            "binutils": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "binutils",
                    "release": "54.el9",
                    "source": "rpm",
                    "version": "2.35.2"
                }
            ],
            "binutils-gold": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "binutils-gold",
                    "release": "54.el9",
                    "source": "rpm",
                    "version": "2.35.2"
                }
            ],
            "bison": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "bison",
                    "release": "5.el9",
                    "source": "rpm",
                    "version": "3.7.4"
                }
            ],
            "blivet-data": [
                {
                    "arch": "noarch",
                    "epoch": 1,
                    "name": "blivet-data",
                    "release": "19.el9",
                    "source": "rpm",
                    "version": "3.6.0"
                }
            ],
            "boost-filesystem": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "boost-filesystem",
                    "release": "8.el9",
                    "source": "rpm",
                    "version": "1.75.0"
                }
            ],
            "boost-system": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "boost-system",
                    "release": "8.el9",
                    "source": "rpm",
                    "version": "1.75.0"
                }
            ],
            "boost-thread": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "boost-thread",
                    "release": "8.el9",
                    "source": "rpm",
                    "version": "1.75.0"
                }
            ],
            "bzip2-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "bzip2-libs",
                    "release": "8.el9",
                    "source": "rpm",
                    "version": "1.0.8"
                }
            ],
            "c-ares": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "c-ares",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "1.19.1"
                }
            ],
            "ca-certificates": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "ca-certificates",
                    "release": "91.4.el9",
                    "source": "rpm",
                    "version": "2024.2.69_v8.0.303"
                }
            ],
            "centos-gpg-keys": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "centos-gpg-keys",
                    "release": "26.el9",
                    "source": "rpm",
                    "version": "9.0"
                }
            ],
            "centos-stream-release": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "centos-stream-release",
                    "release": "26.el9",
                    "source": "rpm",
                    "version": "9.0"
                }
            ],
            "centos-stream-repos": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "centos-stream-repos",
                    "release": "26.el9",
                    "source": "rpm",
                    "version": "9.0"
                }
            ],
            "checkpolicy": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "checkpolicy",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "3.6"
                }
            ],
            "chrony": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "chrony",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "4.6"
                }
            ],
            "clevis": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "clevis",
                    "release": "203.el9",
                    "source": "rpm",
                    "version": "21"
                }
            ],
            "clevis-luks": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "clevis-luks",
                    "release": "203.el9",
                    "source": "rpm",
                    "version": "21"
                }
            ],
            "cloud-init": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "cloud-init",
                    "release": "19.el9",
                    "source": "rpm",
                    "version": "23.4"
                }
            ],
            "cloud-utils-growpart": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "cloud-utils-growpart",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "0.33"
                }
            ],
            "coreutils": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "coreutils",
                    "release": "36.el9",
                    "source": "rpm",
                    "version": "8.32"
                }
            ],
            "coreutils-common": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "coreutils-common",
                    "release": "36.el9",
                    "source": "rpm",
                    "version": "8.32"
                }
            ],
            "cpio": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "cpio",
                    "release": "16.el9",
                    "source": "rpm",
                    "version": "2.13"
                }
            ],
            "cpp": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "cpp",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "11.5.0"
                }
            ],
            "cracklib": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "cracklib",
                    "release": "27.el9",
                    "source": "rpm",
                    "version": "2.9.6"
                }
            ],
            "cracklib-dicts": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "cracklib-dicts",
                    "release": "27.el9",
                    "source": "rpm",
                    "version": "2.9.6"
                }
            ],
            "cronie": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "cronie",
                    "release": "12.el9",
                    "source": "rpm",
                    "version": "1.5.7"
                }
            ],
            "cronie-anacron": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "cronie-anacron",
                    "release": "12.el9",
                    "source": "rpm",
                    "version": "1.5.7"
                }
            ],
            "crontabs": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "crontabs",
                    "release": "26.20190603git.el9",
                    "source": "rpm",
                    "version": "1.11"
                }
            ],
            "crypto-policies": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "crypto-policies",
                    "release": "2.git626aa59.el9",
                    "source": "rpm",
                    "version": "20240828"
                }
            ],
            "crypto-policies-scripts": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "crypto-policies-scripts",
                    "release": "2.git626aa59.el9",
                    "source": "rpm",
                    "version": "20240828"
                }
            ],
            "cryptsetup": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "cryptsetup",
                    "release": "3.el9",
                    "source": "rpm",
                    "version": "2.7.2"
                }
            ],
            "cryptsetup-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "cryptsetup-libs",
                    "release": "3.el9",
                    "source": "rpm",
                    "version": "2.7.2"
                }
            ],
            "curl": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "curl",
                    "release": "31.el9",
                    "source": "rpm",
                    "version": "7.76.1"
                }
            ],
            "cxl-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "cxl-libs",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "78"
                }
            ],
            "cyrus-sasl-lib": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "cyrus-sasl-lib",
                    "release": "21.el9",
                    "source": "rpm",
                    "version": "2.1.27"
                }
            ],
            "daxctl-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "daxctl-libs",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "78"
                }
            ],
            "dbus": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "dbus",
                    "release": "8.el9",
                    "source": "rpm",
                    "version": "1.12.20"
                }
            ],
            "dbus-broker": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "dbus-broker",
                    "release": "7.el9",
                    "source": "rpm",
                    "version": "28"
                }
            ],
            "dbus-common": [
                {
                    "arch": "noarch",
                    "epoch": 1,
                    "name": "dbus-common",
                    "release": "8.el9",
                    "source": "rpm",
                    "version": "1.12.20"
                }
            ],
            "dbus-libs": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "dbus-libs",
                    "release": "8.el9",
                    "source": "rpm",
                    "version": "1.12.20"
                }
            ],
            "dbus-tools": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "dbus-tools",
                    "release": "8.el9",
                    "source": "rpm",
                    "version": "1.12.20"
                }
            ],
            "dejavu-sans-fonts": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "dejavu-sans-fonts",
                    "release": "18.el9",
                    "source": "rpm",
                    "version": "2.37"
                }
            ],
            "device-mapper": [
                {
                    "arch": "x86_64",
                    "epoch": 9,
                    "name": "device-mapper",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "1.02.201"
                }
            ],
            "device-mapper-event": [
                {
                    "arch": "x86_64",
                    "epoch": 9,
                    "name": "device-mapper-event",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "1.02.201"
                }
            ],
            "device-mapper-event-libs": [
                {
                    "arch": "x86_64",
                    "epoch": 9,
                    "name": "device-mapper-event-libs",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "1.02.201"
                }
            ],
            "device-mapper-libs": [
                {
                    "arch": "x86_64",
                    "epoch": 9,
                    "name": "device-mapper-libs",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "1.02.201"
                }
            ],
            "device-mapper-multipath": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "device-mapper-multipath",
                    "release": "33.el9",
                    "source": "rpm",
                    "version": "0.8.7"
                }
            ],
            "device-mapper-multipath-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "device-mapper-multipath-libs",
                    "release": "33.el9",
                    "source": "rpm",
                    "version": "0.8.7"
                }
            ],
            "device-mapper-persistent-data": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "device-mapper-persistent-data",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "1.0.9"
                }
            ],
            "dhcp-client": [
                {
                    "arch": "x86_64",
                    "epoch": 12,
                    "name": "dhcp-client",
                    "release": "19.b1.el9",
                    "source": "rpm",
                    "version": "4.4.2"
                }
            ],
            "dhcp-common": [
                {
                    "arch": "noarch",
                    "epoch": 12,
                    "name": "dhcp-common",
                    "release": "19.b1.el9",
                    "source": "rpm",
                    "version": "4.4.2"
                }
            ],
            "diffutils": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "diffutils",
                    "release": "12.el9",
                    "source": "rpm",
                    "version": "3.7"
                }
            ],
            "dnf": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "dnf",
                    "release": "21.el9",
                    "source": "rpm",
                    "version": "4.14.0"
                }
            ],
            "dnf-data": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "dnf-data",
                    "release": "21.el9",
                    "source": "rpm",
                    "version": "4.14.0"
                }
            ],
            "dnf-plugins-core": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "dnf-plugins-core",
                    "release": "17.el9",
                    "source": "rpm",
                    "version": "4.3.0"
                }
            ],
            "dracut": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "dracut",
                    "release": "70.git20240819.el9",
                    "source": "rpm",
                    "version": "057"
                }
            ],
            "dracut-config-rescue": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "dracut-config-rescue",
                    "release": "70.git20240819.el9",
                    "source": "rpm",
                    "version": "057"
                }
            ],
            "dracut-network": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "dracut-network",
                    "release": "70.git20240819.el9",
                    "source": "rpm",
                    "version": "057"
                }
            ],
            "dracut-squash": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "dracut-squash",
                    "release": "70.git20240819.el9",
                    "source": "rpm",
                    "version": "057"
                }
            ],
            "dyninst": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "dyninst",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "12.1.0"
                }
            ],
            "e2fsprogs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "e2fsprogs",
                    "release": "5.el9",
                    "source": "rpm",
                    "version": "1.46.5"
                }
            ],
            "e2fsprogs-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "e2fsprogs-libs",
                    "release": "5.el9",
                    "source": "rpm",
                    "version": "1.46.5"
                }
            ],
            "efivar-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "efivar-libs",
                    "release": "3.el9",
                    "source": "rpm",
                    "version": "38"
                }
            ],
            "elfutils-debuginfod-client": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "elfutils-debuginfod-client",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "0.191"
                }
            ],
            "elfutils-default-yama-scope": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "elfutils-default-yama-scope",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "0.191"
                }
            ],
            "elfutils-devel": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "elfutils-devel",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "0.191"
                }
            ],
            "elfutils-libelf": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "elfutils-libelf",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "0.191"
                }
            ],
            "elfutils-libelf-devel": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "elfutils-libelf-devel",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "0.191"
                }
            ],
            "elfutils-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "elfutils-libs",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "0.191"
                }
            ],
            "emacs-filesystem": [
                {
                    "arch": "noarch",
                    "epoch": 1,
                    "name": "emacs-filesystem",
                    "release": "10.el9",
                    "source": "rpm",
                    "version": "27.2"
                }
            ],
            "epel-release": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "epel-release",
                    "release": "8.el9",
                    "source": "rpm",
                    "version": "9"
                }
            ],
            "ethtool": [
                {
                    "arch": "x86_64",
                    "epoch": 2,
                    "name": "ethtool",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "6.11"
                }
            ],
            "expat": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "expat",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "2.5.0"
                }
            ],
            "file": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "file",
                    "release": "16.el9",
                    "source": "rpm",
                    "version": "5.39"
                }
            ],
            "file-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "file-libs",
                    "release": "16.el9",
                    "source": "rpm",
                    "version": "5.39"
                }
            ],
            "filesystem": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "filesystem",
                    "release": "5.el9",
                    "source": "rpm",
                    "version": "3.16"
                }
            ],
            "findutils": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "findutils",
                    "release": "7.el9",
                    "source": "rpm",
                    "version": "4.8.0"
                }
            ],
            "firewalld": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "firewalld",
                    "release": "7.el9",
                    "source": "rpm",
                    "version": "1.3.4"
                }
            ],
            "firewalld-filesystem": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "firewalld-filesystem",
                    "release": "7.el9",
                    "source": "rpm",
                    "version": "1.3.4"
                }
            ],
            "flex": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "flex",
                    "release": "9.el9",
                    "source": "rpm",
                    "version": "2.6.4"
                }
            ],
            "fonts-filesystem": [
                {
                    "arch": "noarch",
                    "epoch": 1,
                    "name": "fonts-filesystem",
                    "release": "7.el9.1",
                    "source": "rpm",
                    "version": "2.0.5"
                }
            ],
            "fuse-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "fuse-libs",
                    "release": "16.el9",
                    "source": "rpm",
                    "version": "2.9.9"
                }
            ],
            "gawk": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "gawk",
                    "release": "6.el9",
                    "source": "rpm",
                    "version": "5.1.0"
                }
            ],
            "gawk-all-langpacks": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "gawk-all-langpacks",
                    "release": "6.el9",
                    "source": "rpm",
                    "version": "5.1.0"
                }
            ],
            "gcc": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "gcc",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "11.5.0"
                }
            ],
            "gdbm-libs": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "gdbm-libs",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "1.23"
                }
            ],
            "gdisk": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "gdisk",
                    "release": "5.el9",
                    "source": "rpm",
                    "version": "1.0.7"
                }
            ],
            "geolite2-city": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "geolite2-city",
                    "release": "6.el9",
                    "source": "rpm",
                    "version": "20191217"
                }
            ],
            "geolite2-country": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "geolite2-country",
                    "release": "6.el9",
                    "source": "rpm",
                    "version": "20191217"
                }
            ],
            "gettext": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "gettext",
                    "release": "8.el9",
                    "source": "rpm",
                    "version": "0.21"
                }
            ],
            "gettext-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "gettext-libs",
                    "release": "8.el9",
                    "source": "rpm",
                    "version": "0.21"
                }
            ],
            "git": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "git",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "2.43.5"
                }
            ],
            "git-core": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "git-core",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "2.43.5"
                }
            ],
            "git-core-doc": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "git-core-doc",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "2.43.5"
                }
            ],
            "glib2": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "glib2",
                    "release": "16.el9",
                    "source": "rpm",
                    "version": "2.68.4"
                }
            ],
            "glibc": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "glibc",
                    "release": "133.el9",
                    "source": "rpm",
                    "version": "2.34"
                }
            ],
            "glibc-common": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "glibc-common",
                    "release": "133.el9",
                    "source": "rpm",
                    "version": "2.34"
                }
            ],
            "glibc-devel": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "glibc-devel",
                    "release": "133.el9",
                    "source": "rpm",
                    "version": "2.34"
                }
            ],
            "glibc-gconv-extra": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "glibc-gconv-extra",
                    "release": "133.el9",
                    "source": "rpm",
                    "version": "2.34"
                }
            ],
            "glibc-headers": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "glibc-headers",
                    "release": "133.el9",
                    "source": "rpm",
                    "version": "2.34"
                }
            ],
            "glibc-langpack-en": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "glibc-langpack-en",
                    "release": "133.el9",
                    "source": "rpm",
                    "version": "2.34"
                }
            ],
            "gmp": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "gmp",
                    "release": "13.el9",
                    "source": "rpm",
                    "version": "6.2.0"
                }
            ],
            "gnupg2": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "gnupg2",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "2.3.3"
                }
            ],
            "gnutls": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "gnutls",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "3.8.3"
                }
            ],
            "gobject-introspection": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "gobject-introspection",
                    "release": "11.el9",
                    "source": "rpm",
                    "version": "1.68.0"
                }
            ],
            "gpg-pubkey": [
                {
                    "arch": null,
                    "epoch": null,
                    "name": "gpg-pubkey",
                    "release": "613798eb",
                    "source": "rpm",
                    "version": "3228467c"
                },
                {
                    "arch": null,
                    "epoch": null,
                    "name": "gpg-pubkey",
                    "release": "5ccc5b19",
                    "source": "rpm",
                    "version": "8483c65d"
                }
            ],
            "gpgme": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "gpgme",
                    "release": "6.el9",
                    "source": "rpm",
                    "version": "1.15.1"
                }
            ],
            "gpm-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "gpm-libs",
                    "release": "29.el9",
                    "source": "rpm",
                    "version": "1.20.7"
                }
            ],
            "grep": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "grep",
                    "release": "5.el9",
                    "source": "rpm",
                    "version": "3.6"
                }
            ],
            "groff-base": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "groff-base",
                    "release": "10.el9",
                    "source": "rpm",
                    "version": "1.22.4"
                }
            ],
            "grub2-common": [
                {
                    "arch": "noarch",
                    "epoch": 1,
                    "name": "grub2-common",
                    "release": "93.el9",
                    "source": "rpm",
                    "version": "2.06"
                }
            ],
            "grub2-pc": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "grub2-pc",
                    "release": "93.el9",
                    "source": "rpm",
                    "version": "2.06"
                }
            ],
            "grub2-pc-modules": [
                {
                    "arch": "noarch",
                    "epoch": 1,
                    "name": "grub2-pc-modules",
                    "release": "93.el9",
                    "source": "rpm",
                    "version": "2.06"
                }
            ],
            "grub2-tools": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "grub2-tools",
                    "release": "93.el9",
                    "source": "rpm",
                    "version": "2.06"
                }
            ],
            "grub2-tools-minimal": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "grub2-tools-minimal",
                    "release": "93.el9",
                    "source": "rpm",
                    "version": "2.06"
                }
            ],
            "grubby": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "grubby",
                    "release": "63.el9",
                    "source": "rpm",
                    "version": "8.40"
                }
            ],
            "gssproxy": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "gssproxy",
                    "release": "7.el9",
                    "source": "rpm",
                    "version": "0.8.4"
                }
            ],
            "gzip": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "gzip",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "1.12"
                }
            ],
            "hostname": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "hostname",
                    "release": "6.el9",
                    "source": "rpm",
                    "version": "3.23"
                }
            ],
            "hwdata": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "hwdata",
                    "release": "9.15.el9",
                    "source": "rpm",
                    "version": "0.348"
                }
            ],
            "ima-evm-utils": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "ima-evm-utils",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "1.5"
                }
            ],
            "inih": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "inih",
                    "release": "6.el9",
                    "source": "rpm",
                    "version": "49"
                }
            ],
            "initscripts-rename-device": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "initscripts-rename-device",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "10.11.8"
                }
            ],
            "initscripts-service": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "initscripts-service",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "10.11.8"
                }
            ],
            "ipcalc": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "ipcalc",
                    "release": "5.el9",
                    "source": "rpm",
                    "version": "1.0.0"
                }
            ],
            "iproute": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "iproute",
                    "release": "5.el9",
                    "source": "rpm",
                    "version": "6.2.0"
                }
            ],
            "iproute-tc": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "iproute-tc",
                    "release": "5.el9",
                    "source": "rpm",
                    "version": "6.2.0"
                }
            ],
            "ipset": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "ipset",
                    "release": "8.el9",
                    "source": "rpm",
                    "version": "7.11"
                }
            ],
            "ipset-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "ipset-libs",
                    "release": "8.el9",
                    "source": "rpm",
                    "version": "7.11"
                }
            ],
            "iptables-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "iptables-libs",
                    "release": "5.el9",
                    "source": "rpm",
                    "version": "1.8.10"
                }
            ],
            "iptables-nft": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "iptables-nft",
                    "release": "5.el9",
                    "source": "rpm",
                    "version": "1.8.10"
                }
            ],
            "iputils": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "iputils",
                    "release": "11.el9",
                    "source": "rpm",
                    "version": "20210202"
                }
            ],
            "irqbalance": [
                {
                    "arch": "x86_64",
                    "epoch": 2,
                    "name": "irqbalance",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "1.9.4"
                }
            ],
            "iwl100-firmware": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "iwl100-firmware",
                    "release": "147.el9",
                    "source": "rpm",
                    "version": "39.31.5.1"
                }
            ],
            "iwl1000-firmware": [
                {
                    "arch": "noarch",
                    "epoch": 1,
                    "name": "iwl1000-firmware",
                    "release": "147.el9",
                    "source": "rpm",
                    "version": "39.31.5.1"
                }
            ],
            "iwl105-firmware": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "iwl105-firmware",
                    "release": "147.el9",
                    "source": "rpm",
                    "version": "18.168.6.1"
                }
            ],
            "iwl135-firmware": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "iwl135-firmware",
                    "release": "147.el9",
                    "source": "rpm",
                    "version": "18.168.6.1"
                }
            ],
            "iwl2000-firmware": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "iwl2000-firmware",
                    "release": "147.el9",
                    "source": "rpm",
                    "version": "18.168.6.1"
                }
            ],
            "iwl2030-firmware": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "iwl2030-firmware",
                    "release": "147.el9",
                    "source": "rpm",
                    "version": "18.168.6.1"
                }
            ],
            "iwl3160-firmware": [
                {
                    "arch": "noarch",
                    "epoch": 1,
                    "name": "iwl3160-firmware",
                    "release": "147.el9",
                    "source": "rpm",
                    "version": "25.30.13.0"
                }
            ],
            "iwl5000-firmware": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "iwl5000-firmware",
                    "release": "147.el9",
                    "source": "rpm",
                    "version": "8.83.5.1_1"
                }
            ],
            "iwl5150-firmware": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "iwl5150-firmware",
                    "release": "147.el9",
                    "source": "rpm",
                    "version": "8.24.2.2"
                }
            ],
            "iwl6000g2a-firmware": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "iwl6000g2a-firmware",
                    "release": "147.el9",
                    "source": "rpm",
                    "version": "18.168.6.1"
                }
            ],
            "iwl6050-firmware": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "iwl6050-firmware",
                    "release": "147.el9",
                    "source": "rpm",
                    "version": "41.28.5.1"
                }
            ],
            "iwl7260-firmware": [
                {
                    "arch": "noarch",
                    "epoch": 1,
                    "name": "iwl7260-firmware",
                    "release": "147.el9",
                    "source": "rpm",
                    "version": "25.30.13.0"
                }
            ],
            "jansson": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "jansson",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "2.14"
                }
            ],
            "jitterentropy": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "jitterentropy",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "3.5.0"
                }
            ],
            "jose": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "jose",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "14"
                }
            ],
            "jq": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "jq",
                    "release": "17.el9",
                    "source": "rpm",
                    "version": "1.6"
                }
            ],
            "json-c": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "json-c",
                    "release": "11.el9",
                    "source": "rpm",
                    "version": "0.14"
                }
            ],
            "kbd": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "kbd",
                    "release": "10.el9",
                    "source": "rpm",
                    "version": "2.4.0"
                }
            ],
            "kbd-legacy": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "kbd-legacy",
                    "release": "10.el9",
                    "source": "rpm",
                    "version": "2.4.0"
                }
            ],
            "kbd-misc": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "kbd-misc",
                    "release": "10.el9",
                    "source": "rpm",
                    "version": "2.4.0"
                }
            ],
            "kernel": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "kernel",
                    "release": "522.el9",
                    "source": "rpm",
                    "version": "5.14.0"
                }
            ],
            "kernel-core": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "kernel-core",
                    "release": "522.el9",
                    "source": "rpm",
                    "version": "5.14.0"
                }
            ],
            "kernel-devel": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "kernel-devel",
                    "release": "522.el9",
                    "source": "rpm",
                    "version": "5.14.0"
                }
            ],
            "kernel-headers": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "kernel-headers",
                    "release": "522.el9",
                    "source": "rpm",
                    "version": "5.14.0"
                }
            ],
            "kernel-modules": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "kernel-modules",
                    "release": "522.el9",
                    "source": "rpm",
                    "version": "5.14.0"
                }
            ],
            "kernel-modules-core": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "kernel-modules-core",
                    "release": "522.el9",
                    "source": "rpm",
                    "version": "5.14.0"
                }
            ],
            "kernel-tools": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "kernel-tools",
                    "release": "522.el9",
                    "source": "rpm",
                    "version": "5.14.0"
                }
            ],
            "kernel-tools-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "kernel-tools-libs",
                    "release": "522.el9",
                    "source": "rpm",
                    "version": "5.14.0"
                }
            ],
            "kexec-tools": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "kexec-tools",
                    "release": "18.el9",
                    "source": "rpm",
                    "version": "2.0.27"
                }
            ],
            "keyutils": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "keyutils",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "1.6.3"
                }
            ],
            "keyutils-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "keyutils-libs",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "1.6.3"
                }
            ],
            "kmod": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "kmod",
                    "release": "10.el9",
                    "source": "rpm",
                    "version": "28"
                }
            ],
            "kmod-kvdo": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "kmod-kvdo",
                    "release": "144.el9",
                    "source": "rpm",
                    "version": "8.2.4.15"
                }
            ],
            "kmod-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "kmod-libs",
                    "release": "10.el9",
                    "source": "rpm",
                    "version": "28"
                }
            ],
            "kpartx": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "kpartx",
                    "release": "33.el9",
                    "source": "rpm",
                    "version": "0.8.7"
                }
            ],
            "krb5-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "krb5-libs",
                    "release": "3.el9",
                    "source": "rpm",
                    "version": "1.21.1"
                }
            ],
            "langpacks-core-en": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "langpacks-core-en",
                    "release": "16.el9",
                    "source": "rpm",
                    "version": "3.0"
                }
            ],
            "langpacks-core-font-en": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "langpacks-core-font-en",
                    "release": "16.el9",
                    "source": "rpm",
                    "version": "3.0"
                }
            ],
            "langpacks-en": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "langpacks-en",
                    "release": "16.el9",
                    "source": "rpm",
                    "version": "3.0"
                }
            ],
            "less": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "less",
                    "release": "5.el9",
                    "source": "rpm",
                    "version": "590"
                }
            ],
            "libacl": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libacl",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "2.3.1"
                }
            ],
            "libaio": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libaio",
                    "release": "13.el9",
                    "source": "rpm",
                    "version": "0.3.111"
                }
            ],
            "libarchive": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libarchive",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "3.5.3"
                }
            ],
            "libassuan": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libassuan",
                    "release": "3.el9",
                    "source": "rpm",
                    "version": "2.5.5"
                }
            ],
            "libattr": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libattr",
                    "release": "3.el9",
                    "source": "rpm",
                    "version": "2.5.1"
                }
            ],
            "libbasicobjects": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libbasicobjects",
                    "release": "53.el9",
                    "source": "rpm",
                    "version": "0.1.1"
                }
            ],
            "libblkid": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libblkid",
                    "release": "20.el9",
                    "source": "rpm",
                    "version": "2.37.4"
                }
            ],
            "libblockdev": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libblockdev",
                    "release": "12.el9",
                    "source": "rpm",
                    "version": "2.28"
                }
            ],
            "libblockdev-crypto": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libblockdev-crypto",
                    "release": "12.el9",
                    "source": "rpm",
                    "version": "2.28"
                }
            ],
            "libblockdev-dm": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libblockdev-dm",
                    "release": "12.el9",
                    "source": "rpm",
                    "version": "2.28"
                }
            ],
            "libblockdev-fs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libblockdev-fs",
                    "release": "12.el9",
                    "source": "rpm",
                    "version": "2.28"
                }
            ],
            "libblockdev-kbd": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libblockdev-kbd",
                    "release": "12.el9",
                    "source": "rpm",
                    "version": "2.28"
                }
            ],
            "libblockdev-loop": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libblockdev-loop",
                    "release": "12.el9",
                    "source": "rpm",
                    "version": "2.28"
                }
            ],
            "libblockdev-lvm": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libblockdev-lvm",
                    "release": "12.el9",
                    "source": "rpm",
                    "version": "2.28"
                }
            ],
            "libblockdev-mdraid": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libblockdev-mdraid",
                    "release": "12.el9",
                    "source": "rpm",
                    "version": "2.28"
                }
            ],
            "libblockdev-mpath": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libblockdev-mpath",
                    "release": "12.el9",
                    "source": "rpm",
                    "version": "2.28"
                }
            ],
            "libblockdev-nvdimm": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libblockdev-nvdimm",
                    "release": "12.el9",
                    "source": "rpm",
                    "version": "2.28"
                }
            ],
            "libblockdev-part": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libblockdev-part",
                    "release": "12.el9",
                    "source": "rpm",
                    "version": "2.28"
                }
            ],
            "libblockdev-swap": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libblockdev-swap",
                    "release": "12.el9",
                    "source": "rpm",
                    "version": "2.28"
                }
            ],
            "libblockdev-utils": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libblockdev-utils",
                    "release": "12.el9",
                    "source": "rpm",
                    "version": "2.28"
                }
            ],
            "libbpf": [
                {
                    "arch": "x86_64",
                    "epoch": 2,
                    "name": "libbpf",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "1.4.0"
                }
            ],
            "libbrotli": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libbrotli",
                    "release": "6.el9",
                    "source": "rpm",
                    "version": "1.0.9"
                }
            ],
            "libbytesize": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libbytesize",
                    "release": "3.el9",
                    "source": "rpm",
                    "version": "2.5"
                }
            ],
            "libcap": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libcap",
                    "release": "9.el9",
                    "source": "rpm",
                    "version": "2.48"
                }
            ],
            "libcap-ng": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libcap-ng",
                    "release": "7.el9",
                    "source": "rpm",
                    "version": "0.8.2"
                }
            ],
            "libcap-ng-python3": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libcap-ng-python3",
                    "release": "7.el9",
                    "source": "rpm",
                    "version": "0.8.2"
                }
            ],
            "libcbor": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libcbor",
                    "release": "5.el9",
                    "source": "rpm",
                    "version": "0.7.0"
                }
            ],
            "libcollection": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libcollection",
                    "release": "53.el9",
                    "source": "rpm",
                    "version": "0.7.0"
                }
            ],
            "libcom_err": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libcom_err",
                    "release": "5.el9",
                    "source": "rpm",
                    "version": "1.46.5"
                }
            ],
            "libcomps": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libcomps",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "0.1.18"
                }
            ],
            "libcurl": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libcurl",
                    "release": "31.el9",
                    "source": "rpm",
                    "version": "7.76.1"
                }
            ],
            "libdaemon": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libdaemon",
                    "release": "23.el9",
                    "source": "rpm",
                    "version": "0.14"
                }
            ],
            "libdb": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libdb",
                    "release": "55.el9",
                    "source": "rpm",
                    "version": "5.3.28"
                }
            ],
            "libdhash": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libdhash",
                    "release": "53.el9",
                    "source": "rpm",
                    "version": "0.5.0"
                }
            ],
            "libdnf": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libdnf",
                    "release": "12.el9",
                    "source": "rpm",
                    "version": "0.69.0"
                }
            ],
            "libeconf": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libeconf",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "0.4.1"
                }
            ],
            "libedit": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libedit",
                    "release": "38.20210216cvs.el9",
                    "source": "rpm",
                    "version": "3.1"
                }
            ],
            "libestr": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libestr",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "0.1.11"
                }
            ],
            "libev": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libev",
                    "release": "6.el9",
                    "source": "rpm",
                    "version": "4.33"
                }
            ],
            "libevent": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libevent",
                    "release": "8.el9",
                    "source": "rpm",
                    "version": "2.1.12"
                }
            ],
            "libfastjson": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libfastjson",
                    "release": "5.el9",
                    "source": "rpm",
                    "version": "0.99.9"
                }
            ],
            "libfdisk": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libfdisk",
                    "release": "20.el9",
                    "source": "rpm",
                    "version": "2.37.4"
                }
            ],
            "libffi": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libffi",
                    "release": "8.el9",
                    "source": "rpm",
                    "version": "3.4.2"
                }
            ],
            "libfido2": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libfido2",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "1.13.0"
                }
            ],
            "libgcc": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libgcc",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "11.5.0"
                }
            ],
            "libgcrypt": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libgcrypt",
                    "release": "11.el9",
                    "source": "rpm",
                    "version": "1.10.0"
                }
            ],
            "libgomp": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libgomp",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "11.5.0"
                }
            ],
            "libgpg-error": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libgpg-error",
                    "release": "5.el9",
                    "source": "rpm",
                    "version": "1.42"
                }
            ],
            "libidn2": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libidn2",
                    "release": "7.el9",
                    "source": "rpm",
                    "version": "2.3.0"
                }
            ],
            "libini_config": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libini_config",
                    "release": "53.el9",
                    "source": "rpm",
                    "version": "1.3.1"
                }
            ],
            "libjose": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libjose",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "14"
                }
            ],
            "libkcapi": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libkcapi",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "1.4.0"
                }
            ],
            "libkcapi-hmaccalc": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libkcapi-hmaccalc",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "1.4.0"
                }
            ],
            "libksba": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libksba",
                    "release": "7.el9",
                    "source": "rpm",
                    "version": "1.5.1"
                }
            ],
            "libldb": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libldb",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "2.9.1"
                }
            ],
            "libluksmeta": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libluksmeta",
                    "release": "12.el9",
                    "source": "rpm",
                    "version": "9"
                }
            ],
            "libmaxminddb": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libmaxminddb",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "1.5.2"
                }
            ],
            "libmnl": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libmnl",
                    "release": "16.el9",
                    "source": "rpm",
                    "version": "1.0.4"
                }
            ],
            "libmodulemd": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libmodulemd",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "2.13.0"
                }
            ],
            "libmount": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libmount",
                    "release": "20.el9",
                    "source": "rpm",
                    "version": "2.37.4"
                }
            ],
            "libmpc": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libmpc",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "1.2.1"
                }
            ],
            "libndp": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libndp",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "1.9"
                }
            ],
            "libnetfilter_conntrack": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libnetfilter_conntrack",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "1.0.9"
                }
            ],
            "libnfnetlink": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libnfnetlink",
                    "release": "21.el9",
                    "source": "rpm",
                    "version": "1.0.1"
                }
            ],
            "libnfsidmap": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "libnfsidmap",
                    "release": "27.el9",
                    "source": "rpm",
                    "version": "2.5.4"
                }
            ],
            "libnftnl": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libnftnl",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "1.2.6"
                }
            ],
            "libnghttp2": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libnghttp2",
                    "release": "6.el9",
                    "source": "rpm",
                    "version": "1.43.0"
                }
            ],
            "libnl3": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libnl3",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "3.9.0"
                }
            ],
            "libnl3-cli": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libnl3-cli",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "3.9.0"
                }
            ],
            "libpath_utils": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libpath_utils",
                    "release": "53.el9",
                    "source": "rpm",
                    "version": "0.2.1"
                }
            ],
            "libpipeline": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libpipeline",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "1.5.3"
                }
            ],
            "libpkgconf": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libpkgconf",
                    "release": "10.el9",
                    "source": "rpm",
                    "version": "1.7.3"
                }
            ],
            "libpsl": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libpsl",
                    "release": "5.el9",
                    "source": "rpm",
                    "version": "0.21.1"
                }
            ],
            "libpwquality": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libpwquality",
                    "release": "8.el9",
                    "source": "rpm",
                    "version": "1.4.4"
                }
            ],
            "libref_array": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libref_array",
                    "release": "53.el9",
                    "source": "rpm",
                    "version": "0.1.5"
                }
            ],
            "librepo": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "librepo",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "1.14.5"
                }
            ],
            "libreport-filesystem": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "libreport-filesystem",
                    "release": "6.el9",
                    "source": "rpm",
                    "version": "2.15.2"
                }
            ],
            "libseccomp": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libseccomp",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "2.5.2"
                }
            ],
            "libselinux": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libselinux",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "3.6"
                }
            ],
            "libselinux-utils": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libselinux-utils",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "3.6"
                }
            ],
            "libsemanage": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libsemanage",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "3.6"
                }
            ],
            "libsepol": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libsepol",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "3.6"
                }
            ],
            "libsigsegv": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libsigsegv",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "2.13"
                }
            ],
            "libsmartcols": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libsmartcols",
                    "release": "20.el9",
                    "source": "rpm",
                    "version": "2.37.4"
                }
            ],
            "libsolv": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libsolv",
                    "release": "3.el9",
                    "source": "rpm",
                    "version": "0.7.24"
                }
            ],
            "libss": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libss",
                    "release": "5.el9",
                    "source": "rpm",
                    "version": "1.46.5"
                }
            ],
            "libssh": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libssh",
                    "release": "13.el9",
                    "source": "rpm",
                    "version": "0.10.4"
                }
            ],
            "libssh-config": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "libssh-config",
                    "release": "13.el9",
                    "source": "rpm",
                    "version": "0.10.4"
                }
            ],
            "libsss_certmap": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libsss_certmap",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "2.9.5"
                }
            ],
            "libsss_idmap": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libsss_idmap",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "2.9.5"
                }
            ],
            "libsss_nss_idmap": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libsss_nss_idmap",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "2.9.5"
                }
            ],
            "libsss_sudo": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libsss_sudo",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "2.9.5"
                }
            ],
            "libstdc++": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libstdc++",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "11.5.0"
                }
            ],
            "libsysfs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libsysfs",
                    "release": "10.el9",
                    "source": "rpm",
                    "version": "2.1.1"
                }
            ],
            "libtalloc": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libtalloc",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "2.4.2"
                }
            ],
            "libtasn1": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libtasn1",
                    "release": "8.el9",
                    "source": "rpm",
                    "version": "4.16.0"
                }
            ],
            "libtdb": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libtdb",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "1.4.10"
                }
            ],
            "libteam": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libteam",
                    "release": "16.el9",
                    "source": "rpm",
                    "version": "1.31"
                }
            ],
            "libtevent": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libtevent",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "0.16.1"
                }
            ],
            "libtirpc": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libtirpc",
                    "release": "9.el9",
                    "source": "rpm",
                    "version": "1.3.3"
                }
            ],
            "libunistring": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libunistring",
                    "release": "15.el9",
                    "source": "rpm",
                    "version": "0.9.10"
                }
            ],
            "liburing": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "liburing",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "2.5"
                }
            ],
            "libuser": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libuser",
                    "release": "15.el9",
                    "source": "rpm",
                    "version": "0.63"
                }
            ],
            "libutempter": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libutempter",
                    "release": "6.el9",
                    "source": "rpm",
                    "version": "1.2.1"
                }
            ],
            "libuuid": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libuuid",
                    "release": "20.el9",
                    "source": "rpm",
                    "version": "2.37.4"
                }
            ],
            "libverto": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libverto",
                    "release": "3.el9",
                    "source": "rpm",
                    "version": "0.3.2"
                }
            ],
            "libverto-libev": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libverto-libev",
                    "release": "3.el9",
                    "source": "rpm",
                    "version": "0.3.2"
                }
            ],
            "libxcrypt": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libxcrypt",
                    "release": "3.el9",
                    "source": "rpm",
                    "version": "4.4.18"
                }
            ],
            "libxcrypt-compat": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libxcrypt-compat",
                    "release": "3.el9",
                    "source": "rpm",
                    "version": "4.4.18"
                }
            ],
            "libxcrypt-devel": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libxcrypt-devel",
                    "release": "3.el9",
                    "source": "rpm",
                    "version": "4.4.18"
                }
            ],
            "libxml2": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libxml2",
                    "release": "6.el9",
                    "source": "rpm",
                    "version": "2.9.13"
                }
            ],
            "libxslt": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libxslt",
                    "release": "9.el9",
                    "source": "rpm",
                    "version": "1.1.34"
                }
            ],
            "libyaml": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libyaml",
                    "release": "7.el9",
                    "source": "rpm",
                    "version": "0.2.5"
                }
            ],
            "libzstd": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libzstd",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "1.5.1"
                }
            ],
            "libzstd-devel": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libzstd-devel",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "1.5.1"
                }
            ],
            "linux-firmware": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "linux-firmware",
                    "release": "147.el9",
                    "source": "rpm",
                    "version": "20241021"
                }
            ],
            "linux-firmware-whence": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "linux-firmware-whence",
                    "release": "147.el9",
                    "source": "rpm",
                    "version": "20241021"
                }
            ],
            "lmdb-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "lmdb-libs",
                    "release": "3.el9",
                    "source": "rpm",
                    "version": "0.9.29"
                }
            ],
            "logrotate": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "logrotate",
                    "release": "8.el9",
                    "source": "rpm",
                    "version": "3.18.0"
                }
            ],
            "lshw": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "lshw",
                    "release": "10.el9",
                    "source": "rpm",
                    "version": "B.02.19.2"
                }
            ],
            "lsof": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "lsof",
                    "release": "3.el9",
                    "source": "rpm",
                    "version": "4.94.0"
                }
            ],
            "lsscsi": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "lsscsi",
                    "release": "6.el9",
                    "source": "rpm",
                    "version": "0.32"
                }
            ],
            "lua-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "lua-libs",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "5.4.4"
                }
            ],
            "luksmeta": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "luksmeta",
                    "release": "12.el9",
                    "source": "rpm",
                    "version": "9"
                }
            ],
            "lvm2": [
                {
                    "arch": "x86_64",
                    "epoch": 9,
                    "name": "lvm2",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "2.03.27"
                }
            ],
            "lvm2-libs": [
                {
                    "arch": "x86_64",
                    "epoch": 9,
                    "name": "lvm2-libs",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "2.03.27"
                }
            ],
            "lz4-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "lz4-libs",
                    "release": "5.el9",
                    "source": "rpm",
                    "version": "1.9.3"
                }
            ],
            "lzo": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "lzo",
                    "release": "7.el9",
                    "source": "rpm",
                    "version": "2.10"
                }
            ],
            "m4": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "m4",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "1.4.19"
                }
            ],
            "make": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "make",
                    "release": "8.el9",
                    "source": "rpm",
                    "version": "4.3"
                }
            ],
            "man-db": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "man-db",
                    "release": "7.el9",
                    "source": "rpm",
                    "version": "2.9.3"
                }
            ],
            "mdadm": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "mdadm",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "4.3"
                }
            ],
            "microcode_ctl": [
                {
                    "arch": "noarch",
                    "epoch": 4,
                    "name": "microcode_ctl",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "20240910"
                }
            ],
            "mokutil": [
                {
                    "arch": "x86_64",
                    "epoch": 2,
                    "name": "mokutil",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "0.6.0"
                }
            ],
            "mpfr": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "mpfr",
                    "release": "7.el9",
                    "source": "rpm",
                    "version": "4.1.0"
                }
            ],
            "ncurses": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "ncurses",
                    "release": "10.20210508.el9",
                    "source": "rpm",
                    "version": "6.2"
                }
            ],
            "ncurses-base": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "ncurses-base",
                    "release": "10.20210508.el9",
                    "source": "rpm",
                    "version": "6.2"
                }
            ],
            "ncurses-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "ncurses-libs",
                    "release": "10.20210508.el9",
                    "source": "rpm",
                    "version": "6.2"
                }
            ],
            "ndctl": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "ndctl",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "78"
                }
            ],
            "ndctl-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "ndctl-libs",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "78"
                }
            ],
            "nettle": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "nettle",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "3.9.1"
                }
            ],
            "newt": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "newt",
                    "release": "11.el9",
                    "source": "rpm",
                    "version": "0.52.21"
                }
            ],
            "nfs-utils": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "nfs-utils",
                    "release": "27.el9",
                    "source": "rpm",
                    "version": "2.5.4"
                }
            ],
            "nftables": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "nftables",
                    "release": "3.el9",
                    "source": "rpm",
                    "version": "1.0.9"
                }
            ],
            "npth": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "npth",
                    "release": "8.el9",
                    "source": "rpm",
                    "version": "1.6"
                }
            ],
            "nspr": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "nspr",
                    "release": "14.el9",
                    "source": "rpm",
                    "version": "4.35.0"
                }
            ],
            "nss": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "nss",
                    "release": "7.el9",
                    "source": "rpm",
                    "version": "3.101.0"
                }
            ],
            "nss-softokn": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "nss-softokn",
                    "release": "7.el9",
                    "source": "rpm",
                    "version": "3.101.0"
                }
            ],
            "nss-softokn-freebl": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "nss-softokn-freebl",
                    "release": "7.el9",
                    "source": "rpm",
                    "version": "3.101.0"
                }
            ],
            "nss-sysinit": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "nss-sysinit",
                    "release": "7.el9",
                    "source": "rpm",
                    "version": "3.101.0"
                }
            ],
            "nss-util": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "nss-util",
                    "release": "7.el9",
                    "source": "rpm",
                    "version": "3.101.0"
                }
            ],
            "numactl-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "numactl-libs",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "2.0.18"
                }
            ],
            "oddjob": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "oddjob",
                    "release": "7.el9",
                    "source": "rpm",
                    "version": "0.34.7"
                }
            ],
            "oddjob-mkhomedir": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "oddjob-mkhomedir",
                    "release": "7.el9",
                    "source": "rpm",
                    "version": "0.34.7"
                }
            ],
            "oniguruma": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "oniguruma",
                    "release": "1.el9.6",
                    "source": "rpm",
                    "version": "6.9.6"
                }
            ],
            "openldap": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "openldap",
                    "release": "3.el9",
                    "source": "rpm",
                    "version": "2.6.6"
                }
            ],
            "openssh": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "openssh",
                    "release": "44.el9",
                    "source": "rpm",
                    "version": "8.7p1"
                }
            ],
            "openssh-clients": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "openssh-clients",
                    "release": "44.el9",
                    "source": "rpm",
                    "version": "8.7p1"
                }
            ],
            "openssh-server": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "openssh-server",
                    "release": "44.el9",
                    "source": "rpm",
                    "version": "8.7p1"
                }
            ],
            "openssl": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "openssl",
                    "release": "6.el9",
                    "source": "rpm",
                    "version": "3.2.2"
                }
            ],
            "openssl-devel": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "openssl-devel",
                    "release": "6.el9",
                    "source": "rpm",
                    "version": "3.2.2"
                }
            ],
            "openssl-libs": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "openssl-libs",
                    "release": "6.el9",
                    "source": "rpm",
                    "version": "3.2.2"
                }
            ],
            "os-prober": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "os-prober",
                    "release": "10.el9",
                    "source": "rpm",
                    "version": "1.77"
                }
            ],
            "p11-kit": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "p11-kit",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "0.25.3"
                }
            ],
            "p11-kit-trust": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "p11-kit-trust",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "0.25.3"
                }
            ],
            "pam": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "pam",
                    "release": "20.el9",
                    "source": "rpm",
                    "version": "1.5.1"
                }
            ],
            "parted": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "parted",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "3.5"
                }
            ],
            "passwd": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "passwd",
                    "release": "12.el9",
                    "source": "rpm",
                    "version": "0.80"
                }
            ],
            "pciutils-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "pciutils-libs",
                    "release": "5.el9",
                    "source": "rpm",
                    "version": "3.7.0"
                }
            ],
            "pcre": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "pcre",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "8.44"
                }
            ],
            "pcre2": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "pcre2",
                    "release": "6.el9",
                    "source": "rpm",
                    "version": "10.40"
                }
            ],
            "pcre2-syntax": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "pcre2-syntax",
                    "release": "6.el9",
                    "source": "rpm",
                    "version": "10.40"
                }
            ],
            "perl-AutoLoader": [
                {
                    "arch": "noarch",
                    "epoch": 0,
                    "name": "perl-AutoLoader",
                    "release": "481.el9",
                    "source": "rpm",
                    "version": "5.74"
                }
            ],
            "perl-B": [
                {
                    "arch": "x86_64",
                    "epoch": 0,
                    "name": "perl-B",
                    "release": "481.el9",
                    "source": "rpm",
                    "version": "1.80"
                }
            ],
            "perl-Carp": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "perl-Carp",
                    "release": "460.el9",
                    "source": "rpm",
                    "version": "1.50"
                }
            ],
            "perl-Class-Struct": [
                {
                    "arch": "noarch",
                    "epoch": 0,
                    "name": "perl-Class-Struct",
                    "release": "481.el9",
                    "source": "rpm",
                    "version": "0.66"
                }
            ],
            "perl-Data-Dumper": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "perl-Data-Dumper",
                    "release": "462.el9",
                    "source": "rpm",
                    "version": "2.174"
                }
            ],
            "perl-Digest": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "perl-Digest",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "1.19"
                }
            ],
            "perl-Digest-MD5": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "perl-Digest-MD5",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "2.58"
                }
            ],
            "perl-DynaLoader": [
                {
                    "arch": "x86_64",
                    "epoch": 0,
                    "name": "perl-DynaLoader",
                    "release": "481.el9",
                    "source": "rpm",
                    "version": "1.47"
                }
            ],
            "perl-Encode": [
                {
                    "arch": "x86_64",
                    "epoch": 4,
                    "name": "perl-Encode",
                    "release": "462.el9",
                    "source": "rpm",
                    "version": "3.08"
                }
            ],
            "perl-Errno": [
                {
                    "arch": "x86_64",
                    "epoch": 0,
                    "name": "perl-Errno",
                    "release": "481.el9",
                    "source": "rpm",
                    "version": "1.30"
                }
            ],
            "perl-Error": [
                {
                    "arch": "noarch",
                    "epoch": 1,
                    "name": "perl-Error",
                    "release": "7.el9",
                    "source": "rpm",
                    "version": "0.17029"
                }
            ],
            "perl-Exporter": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "perl-Exporter",
                    "release": "461.el9",
                    "source": "rpm",
                    "version": "5.74"
                }
            ],
            "perl-Fcntl": [
                {
                    "arch": "x86_64",
                    "epoch": 0,
                    "name": "perl-Fcntl",
                    "release": "481.el9",
                    "source": "rpm",
                    "version": "1.13"
                }
            ],
            "perl-File-Basename": [
                {
                    "arch": "noarch",
                    "epoch": 0,
                    "name": "perl-File-Basename",
                    "release": "481.el9",
                    "source": "rpm",
                    "version": "2.85"
                }
            ],
            "perl-File-Find": [
                {
                    "arch": "noarch",
                    "epoch": 0,
                    "name": "perl-File-Find",
                    "release": "481.el9",
                    "source": "rpm",
                    "version": "1.37"
                }
            ],
            "perl-File-Path": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "perl-File-Path",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "2.18"
                }
            ],
            "perl-File-Temp": [
                {
                    "arch": "noarch",
                    "epoch": 1,
                    "name": "perl-File-Temp",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "0.231.100"
                }
            ],
            "perl-File-stat": [
                {
                    "arch": "noarch",
                    "epoch": 0,
                    "name": "perl-File-stat",
                    "release": "481.el9",
                    "source": "rpm",
                    "version": "1.09"
                }
            ],
            "perl-FileHandle": [
                {
                    "arch": "noarch",
                    "epoch": 0,
                    "name": "perl-FileHandle",
                    "release": "481.el9",
                    "source": "rpm",
                    "version": "2.03"
                }
            ],
            "perl-Getopt-Long": [
                {
                    "arch": "noarch",
                    "epoch": 1,
                    "name": "perl-Getopt-Long",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "2.52"
                }
            ],
            "perl-Getopt-Std": [
                {
                    "arch": "noarch",
                    "epoch": 0,
                    "name": "perl-Getopt-Std",
                    "release": "481.el9",
                    "source": "rpm",
                    "version": "1.12"
                }
            ],
            "perl-Git": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "perl-Git",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "2.43.5"
                }
            ],
            "perl-HTTP-Tiny": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "perl-HTTP-Tiny",
                    "release": "462.el9",
                    "source": "rpm",
                    "version": "0.076"
                }
            ],
            "perl-IO": [
                {
                    "arch": "x86_64",
                    "epoch": 0,
                    "name": "perl-IO",
                    "release": "481.el9",
                    "source": "rpm",
                    "version": "1.43"
                }
            ],
            "perl-IO-Socket-IP": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "perl-IO-Socket-IP",
                    "release": "5.el9",
                    "source": "rpm",
                    "version": "0.41"
                }
            ],
            "perl-IO-Socket-SSL": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "perl-IO-Socket-SSL",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "2.073"
                }
            ],
            "perl-IPC-Open3": [
                {
                    "arch": "noarch",
                    "epoch": 0,
                    "name": "perl-IPC-Open3",
                    "release": "481.el9",
                    "source": "rpm",
                    "version": "1.21"
                }
            ],
            "perl-MIME-Base64": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "perl-MIME-Base64",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "3.16"
                }
            ],
            "perl-Mozilla-CA": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "perl-Mozilla-CA",
                    "release": "6.el9",
                    "source": "rpm",
                    "version": "20200520"
                }
            ],
            "perl-NDBM_File": [
                {
                    "arch": "x86_64",
                    "epoch": 0,
                    "name": "perl-NDBM_File",
                    "release": "481.el9",
                    "source": "rpm",
                    "version": "1.15"
                }
            ],
            "perl-Net-SSLeay": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "perl-Net-SSLeay",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "1.94"
                }
            ],
            "perl-POSIX": [
                {
                    "arch": "x86_64",
                    "epoch": 0,
                    "name": "perl-POSIX",
                    "release": "481.el9",
                    "source": "rpm",
                    "version": "1.94"
                }
            ],
            "perl-PathTools": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "perl-PathTools",
                    "release": "461.el9",
                    "source": "rpm",
                    "version": "3.78"
                }
            ],
            "perl-Pod-Escapes": [
                {
                    "arch": "noarch",
                    "epoch": 1,
                    "name": "perl-Pod-Escapes",
                    "release": "460.el9",
                    "source": "rpm",
                    "version": "1.07"
                }
            ],
            "perl-Pod-Perldoc": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "perl-Pod-Perldoc",
                    "release": "461.el9",
                    "source": "rpm",
                    "version": "3.28.01"
                }
            ],
            "perl-Pod-Simple": [
                {
                    "arch": "noarch",
                    "epoch": 1,
                    "name": "perl-Pod-Simple",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "3.42"
                }
            ],
            "perl-Pod-Usage": [
                {
                    "arch": "noarch",
                    "epoch": 4,
                    "name": "perl-Pod-Usage",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "2.01"
                }
            ],
            "perl-Scalar-List-Utils": [
                {
                    "arch": "x86_64",
                    "epoch": 4,
                    "name": "perl-Scalar-List-Utils",
                    "release": "462.el9",
                    "source": "rpm",
                    "version": "1.56"
                }
            ],
            "perl-SelectSaver": [
                {
                    "arch": "noarch",
                    "epoch": 0,
                    "name": "perl-SelectSaver",
                    "release": "481.el9",
                    "source": "rpm",
                    "version": "1.02"
                }
            ],
            "perl-Socket": [
                {
                    "arch": "x86_64",
                    "epoch": 4,
                    "name": "perl-Socket",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "2.031"
                }
            ],
            "perl-Storable": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "perl-Storable",
                    "release": "460.el9",
                    "source": "rpm",
                    "version": "3.21"
                }
            ],
            "perl-Symbol": [
                {
                    "arch": "noarch",
                    "epoch": 0,
                    "name": "perl-Symbol",
                    "release": "481.el9",
                    "source": "rpm",
                    "version": "1.08"
                }
            ],
            "perl-Term-ANSIColor": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "perl-Term-ANSIColor",
                    "release": "461.el9",
                    "source": "rpm",
                    "version": "5.01"
                }
            ],
            "perl-Term-Cap": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "perl-Term-Cap",
                    "release": "460.el9",
                    "source": "rpm",
                    "version": "1.17"
                }
            ],
            "perl-TermReadKey": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "perl-TermReadKey",
                    "release": "11.el9",
                    "source": "rpm",
                    "version": "2.38"
                }
            ],
            "perl-Text-ParseWords": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "perl-Text-ParseWords",
                    "release": "460.el9",
                    "source": "rpm",
                    "version": "3.30"
                }
            ],
            "perl-Text-Tabs+Wrap": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "perl-Text-Tabs+Wrap",
                    "release": "460.el9",
                    "source": "rpm",
                    "version": "2013.0523"
                }
            ],
            "perl-Time-Local": [
                {
                    "arch": "noarch",
                    "epoch": 2,
                    "name": "perl-Time-Local",
                    "release": "7.el9",
                    "source": "rpm",
                    "version": "1.300"
                }
            ],
            "perl-URI": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "perl-URI",
                    "release": "3.el9",
                    "source": "rpm",
                    "version": "5.09"
                }
            ],
            "perl-base": [
                {
                    "arch": "noarch",
                    "epoch": 0,
                    "name": "perl-base",
                    "release": "481.el9",
                    "source": "rpm",
                    "version": "2.27"
                }
            ],
            "perl-constant": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "perl-constant",
                    "release": "461.el9",
                    "source": "rpm",
                    "version": "1.33"
                }
            ],
            "perl-if": [
                {
                    "arch": "noarch",
                    "epoch": 0,
                    "name": "perl-if",
                    "release": "481.el9",
                    "source": "rpm",
                    "version": "0.60.800"
                }
            ],
            "perl-interpreter": [
                {
                    "arch": "x86_64",
                    "epoch": 4,
                    "name": "perl-interpreter",
                    "release": "481.el9",
                    "source": "rpm",
                    "version": "5.32.1"
                }
            ],
            "perl-lib": [
                {
                    "arch": "x86_64",
                    "epoch": 0,
                    "name": "perl-lib",
                    "release": "481.el9",
                    "source": "rpm",
                    "version": "0.65"
                }
            ],
            "perl-libnet": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "perl-libnet",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "3.13"
                }
            ],
            "perl-libs": [
                {
                    "arch": "x86_64",
                    "epoch": 4,
                    "name": "perl-libs",
                    "release": "481.el9",
                    "source": "rpm",
                    "version": "5.32.1"
                }
            ],
            "perl-mro": [
                {
                    "arch": "x86_64",
                    "epoch": 0,
                    "name": "perl-mro",
                    "release": "481.el9",
                    "source": "rpm",
                    "version": "1.23"
                }
            ],
            "perl-overload": [
                {
                    "arch": "noarch",
                    "epoch": 0,
                    "name": "perl-overload",
                    "release": "481.el9",
                    "source": "rpm",
                    "version": "1.31"
                }
            ],
            "perl-overloading": [
                {
                    "arch": "noarch",
                    "epoch": 0,
                    "name": "perl-overloading",
                    "release": "481.el9",
                    "source": "rpm",
                    "version": "0.02"
                }
            ],
            "perl-parent": [
                {
                    "arch": "noarch",
                    "epoch": 1,
                    "name": "perl-parent",
                    "release": "460.el9",
                    "source": "rpm",
                    "version": "0.238"
                }
            ],
            "perl-podlators": [
                {
                    "arch": "noarch",
                    "epoch": 1,
                    "name": "perl-podlators",
                    "release": "460.el9",
                    "source": "rpm",
                    "version": "4.14"
                }
            ],
            "perl-subs": [
                {
                    "arch": "noarch",
                    "epoch": 0,
                    "name": "perl-subs",
                    "release": "481.el9",
                    "source": "rpm",
                    "version": "1.03"
                }
            ],
            "perl-vars": [
                {
                    "arch": "noarch",
                    "epoch": 0,
                    "name": "perl-vars",
                    "release": "481.el9",
                    "source": "rpm",
                    "version": "1.05"
                }
            ],
            "pigz": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "pigz",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "2.5"
                }
            ],
            "pkgconf": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "pkgconf",
                    "release": "10.el9",
                    "source": "rpm",
                    "version": "1.7.3"
                }
            ],
            "pkgconf-m4": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "pkgconf-m4",
                    "release": "10.el9",
                    "source": "rpm",
                    "version": "1.7.3"
                }
            ],
            "pkgconf-pkg-config": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "pkgconf-pkg-config",
                    "release": "10.el9",
                    "source": "rpm",
                    "version": "1.7.3"
                }
            ],
            "policycoreutils": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "policycoreutils",
                    "release": "2.1.el9",
                    "source": "rpm",
                    "version": "3.6"
                }
            ],
            "popt": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "popt",
                    "release": "8.el9",
                    "source": "rpm",
                    "version": "1.18"
                }
            ],
            "prefixdevname": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "prefixdevname",
                    "release": "8.el9",
                    "source": "rpm",
                    "version": "0.1.0"
                }
            ],
            "procps-ng": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "procps-ng",
                    "release": "14.el9",
                    "source": "rpm",
                    "version": "3.3.17"
                }
            ],
            "psmisc": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "psmisc",
                    "release": "3.el9",
                    "source": "rpm",
                    "version": "23.4"
                }
            ],
            "publicsuffix-list-dafsa": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "publicsuffix-list-dafsa",
                    "release": "3.el9",
                    "source": "rpm",
                    "version": "20210518"
                }
            ],
            "python-unversioned-command": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python-unversioned-command",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "3.9.20"
                }
            ],
            "python3": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "3.9.20"
                }
            ],
            "python3-attrs": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-attrs",
                    "release": "7.el9",
                    "source": "rpm",
                    "version": "20.3.0"
                }
            ],
            "python3-audit": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-audit",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "3.1.5"
                }
            ],
            "python3-babel": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-babel",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "2.9.1"
                }
            ],
            "python3-blivet": [
                {
                    "arch": "noarch",
                    "epoch": 1,
                    "name": "python3-blivet",
                    "release": "19.el9",
                    "source": "rpm",
                    "version": "3.6.0"
                }
            ],
            "python3-blockdev": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-blockdev",
                    "release": "12.el9",
                    "source": "rpm",
                    "version": "2.28"
                }
            ],
            "python3-bytesize": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-bytesize",
                    "release": "3.el9",
                    "source": "rpm",
                    "version": "2.5"
                }
            ],
            "python3-chardet": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-chardet",
                    "release": "5.el9",
                    "source": "rpm",
                    "version": "4.0.0"
                }
            ],
            "python3-configobj": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-configobj",
                    "release": "25.el9",
                    "source": "rpm",
                    "version": "5.0.6"
                }
            ],
            "python3-configshell": [
                {
                    "arch": "noarch",
                    "epoch": 1,
                    "name": "python3-configshell",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "1.1.30"
                }
            ],
            "python3-dateutil": [
                {
                    "arch": "noarch",
                    "epoch": 1,
                    "name": "python3-dateutil",
                    "release": "7.el9",
                    "source": "rpm",
                    "version": "2.8.1"
                }
            ],
            "python3-dbus": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-dbus",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "1.2.18"
                }
            ],
            "python3-dbus-client-gen": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-dbus-client-gen",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "0.5.1"
                }
            ],
            "python3-dbus-python-client-gen": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-dbus-python-client-gen",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "0.8.3"
                }
            ],
            "python3-dbus-signature-pyparsing": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-dbus-signature-pyparsing",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "0.4.1"
                }
            ],
            "python3-distro": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-distro",
                    "release": "7.el9",
                    "source": "rpm",
                    "version": "1.5.0"
                }
            ],
            "python3-dnf": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-dnf",
                    "release": "21.el9",
                    "source": "rpm",
                    "version": "4.14.0"
                }
            ],
            "python3-dnf-plugins-core": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-dnf-plugins-core",
                    "release": "17.el9",
                    "source": "rpm",
                    "version": "4.3.0"
                }
            ],
            "python3-firewall": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-firewall",
                    "release": "7.el9",
                    "source": "rpm",
                    "version": "1.3.4"
                }
            ],
            "python3-gobject-base": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-gobject-base",
                    "release": "6.el9",
                    "source": "rpm",
                    "version": "3.40.1"
                }
            ],
            "python3-gobject-base-noarch": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-gobject-base-noarch",
                    "release": "6.el9",
                    "source": "rpm",
                    "version": "3.40.1"
                }
            ],
            "python3-gpg": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-gpg",
                    "release": "6.el9",
                    "source": "rpm",
                    "version": "1.15.1"
                }
            ],
            "python3-hawkey": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-hawkey",
                    "release": "12.el9",
                    "source": "rpm",
                    "version": "0.69.0"
                }
            ],
            "python3-idna": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-idna",
                    "release": "7.el9.1",
                    "source": "rpm",
                    "version": "2.10"
                }
            ],
            "python3-into-dbus-python": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-into-dbus-python",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "0.8.2"
                }
            ],
            "python3-jinja2": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-jinja2",
                    "release": "6.el9",
                    "source": "rpm",
                    "version": "2.11.3"
                }
            ],
            "python3-jsonpatch": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-jsonpatch",
                    "release": "16.el9",
                    "source": "rpm",
                    "version": "1.21"
                }
            ],
            "python3-jsonpointer": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-jsonpointer",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "2.0"
                }
            ],
            "python3-jsonschema": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-jsonschema",
                    "release": "13.el9",
                    "source": "rpm",
                    "version": "3.2.0"
                }
            ],
            "python3-justbases": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-justbases",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "0.15.2"
                }
            ],
            "python3-justbytes": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-justbytes",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "0.15.2"
                }
            ],
            "python3-kmod": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-kmod",
                    "release": "32.el9",
                    "source": "rpm",
                    "version": "0.9"
                }
            ],
            "python3-libcomps": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-libcomps",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "0.1.18"
                }
            ],
            "python3-libdnf": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-libdnf",
                    "release": "12.el9",
                    "source": "rpm",
                    "version": "0.69.0"
                }
            ],
            "python3-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-libs",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "3.9.20"
                }
            ],
            "python3-libselinux": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-libselinux",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "3.6"
                }
            ],
            "python3-libsemanage": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-libsemanage",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "3.6"
                }
            ],
            "python3-lxml": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-lxml",
                    "release": "3.el9",
                    "source": "rpm",
                    "version": "4.6.5"
                }
            ],
            "python3-markupsafe": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-markupsafe",
                    "release": "12.el9",
                    "source": "rpm",
                    "version": "1.1.1"
                }
            ],
            "python3-netifaces": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-netifaces",
                    "release": "15.el9",
                    "source": "rpm",
                    "version": "0.10.6"
                }
            ],
            "python3-nftables": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "python3-nftables",
                    "release": "3.el9",
                    "source": "rpm",
                    "version": "1.0.9"
                }
            ],
            "python3-oauthlib": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-oauthlib",
                    "release": "5.el9",
                    "source": "rpm",
                    "version": "3.1.1"
                }
            ],
            "python3-packaging": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-packaging",
                    "release": "5.el9",
                    "source": "rpm",
                    "version": "20.9"
                }
            ],
            "python3-pip-wheel": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-pip-wheel",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "21.3.1"
                }
            ],
            "python3-policycoreutils": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-policycoreutils",
                    "release": "2.1.el9",
                    "source": "rpm",
                    "version": "3.6"
                }
            ],
            "python3-prettytable": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-prettytable",
                    "release": "27.el9",
                    "source": "rpm",
                    "version": "0.7.2"
                }
            ],
            "python3-psutil": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-psutil",
                    "release": "12.el9",
                    "source": "rpm",
                    "version": "5.8.0"
                }
            ],
            "python3-pyparsing": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-pyparsing",
                    "release": "9.el9",
                    "source": "rpm",
                    "version": "2.4.7"
                }
            ],
            "python3-pyparted": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "python3-pyparted",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "3.12.0"
                }
            ],
            "python3-pyrsistent": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-pyrsistent",
                    "release": "8.el9",
                    "source": "rpm",
                    "version": "0.17.3"
                }
            ],
            "python3-pyserial": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-pyserial",
                    "release": "12.el9",
                    "source": "rpm",
                    "version": "3.4"
                }
            ],
            "python3-pysocks": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-pysocks",
                    "release": "12.el9",
                    "source": "rpm",
                    "version": "1.7.1"
                }
            ],
            "python3-pytz": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-pytz",
                    "release": "5.el9",
                    "source": "rpm",
                    "version": "2021.1"
                }
            ],
            "python3-pyudev": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-pyudev",
                    "release": "6.el9",
                    "source": "rpm",
                    "version": "0.22.0"
                }
            ],
            "python3-pyyaml": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-pyyaml",
                    "release": "6.el9",
                    "source": "rpm",
                    "version": "5.4.1"
                }
            ],
            "python3-requests": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-requests",
                    "release": "8.el9",
                    "source": "rpm",
                    "version": "2.25.1"
                }
            ],
            "python3-rpm": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-rpm",
                    "release": "34.el9",
                    "source": "rpm",
                    "version": "4.16.1.3"
                }
            ],
            "python3-rtslib": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-rtslib",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "2.1.76"
                }
            ],
            "python3-setools": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-setools",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "4.4.4"
                }
            ],
            "python3-setuptools": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-setuptools",
                    "release": "13.el9",
                    "source": "rpm",
                    "version": "53.0.0"
                }
            ],
            "python3-setuptools-wheel": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-setuptools-wheel",
                    "release": "13.el9",
                    "source": "rpm",
                    "version": "53.0.0"
                }
            ],
            "python3-six": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-six",
                    "release": "9.el9",
                    "source": "rpm",
                    "version": "1.15.0"
                }
            ],
            "python3-systemd": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-systemd",
                    "release": "19.el9",
                    "source": "rpm",
                    "version": "234"
                }
            ],
            "python3-urllib3": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-urllib3",
                    "release": "6.el9",
                    "source": "rpm",
                    "version": "1.26.5"
                }
            ],
            "python3-urwid": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-urwid",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "2.1.2"
                }
            ],
            "python3-wcwidth": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-wcwidth",
                    "release": "8.el9",
                    "source": "rpm",
                    "version": "0.2.5"
                }
            ],
            "qa-tools": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "qa-tools",
                    "release": "5.el9",
                    "source": "rpm",
                    "version": "4.1"
                }
            ],
            "qemu-guest-agent": [
                {
                    "arch": "x86_64",
                    "epoch": 17,
                    "name": "qemu-guest-agent",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "9.1.0"
                }
            ],
            "quota": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "quota",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "4.09"
                }
            ],
            "quota-nls": [
                {
                    "arch": "noarch",
                    "epoch": 1,
                    "name": "quota-nls",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "4.09"
                }
            ],
            "readline": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "readline",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "8.1"
                }
            ],
            "restraint": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "restraint",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "0.4.5"
                }
            ],
            "restraint-rhts": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "restraint-rhts",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "0.4.5"
                }
            ],
            "rng-tools": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "rng-tools",
                    "release": "7.el9",
                    "source": "rpm",
                    "version": "6.16"
                }
            ],
            "rootfiles": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "rootfiles",
                    "release": "31.el9",
                    "source": "rpm",
                    "version": "8.1"
                }
            ],
            "rpcbind": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "rpcbind",
                    "release": "7.el9",
                    "source": "rpm",
                    "version": "1.2.6"
                }
            ],
            "rpm": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "rpm",
                    "release": "34.el9",
                    "source": "rpm",
                    "version": "4.16.1.3"
                }
            ],
            "rpm-build-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "rpm-build-libs",
                    "release": "34.el9",
                    "source": "rpm",
                    "version": "4.16.1.3"
                }
            ],
            "rpm-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "rpm-libs",
                    "release": "34.el9",
                    "source": "rpm",
                    "version": "4.16.1.3"
                }
            ],
            "rpm-plugin-audit": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "rpm-plugin-audit",
                    "release": "34.el9",
                    "source": "rpm",
                    "version": "4.16.1.3"
                }
            ],
            "rpm-plugin-selinux": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "rpm-plugin-selinux",
                    "release": "34.el9",
                    "source": "rpm",
                    "version": "4.16.1.3"
                }
            ],
            "rpm-plugin-systemd-inhibit": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "rpm-plugin-systemd-inhibit",
                    "release": "34.el9",
                    "source": "rpm",
                    "version": "4.16.1.3"
                }
            ],
            "rpm-sign-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "rpm-sign-libs",
                    "release": "34.el9",
                    "source": "rpm",
                    "version": "4.16.1.3"
                }
            ],
            "rsync": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "rsync",
                    "release": "20.el9",
                    "source": "rpm",
                    "version": "3.2.3"
                }
            ],
            "rsyslog": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "rsyslog",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "8.2310.0"
                }
            ],
            "rsyslog-logrotate": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "rsyslog-logrotate",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "8.2310.0"
                }
            ],
            "sed": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "sed",
                    "release": "9.el9",
                    "source": "rpm",
                    "version": "4.8"
                }
            ],
            "selinux-policy": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "selinux-policy",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "38.1.47"
                }
            ],
            "selinux-policy-targeted": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "selinux-policy-targeted",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "38.1.47"
                }
            ],
            "setup": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "setup",
                    "release": "10.el9",
                    "source": "rpm",
                    "version": "2.13.7"
                }
            ],
            "sg3_utils": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "sg3_utils",
                    "release": "9.el9",
                    "source": "rpm",
                    "version": "1.47"
                }
            ],
            "sg3_utils-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "sg3_utils-libs",
                    "release": "9.el9",
                    "source": "rpm",
                    "version": "1.47"
                }
            ],
            "shadow-utils": [
                {
                    "arch": "x86_64",
                    "epoch": 2,
                    "name": "shadow-utils",
                    "release": "9.el9",
                    "source": "rpm",
                    "version": "4.9"
                }
            ],
            "slang": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "slang",
                    "release": "11.el9",
                    "source": "rpm",
                    "version": "2.3.2"
                }
            ],
            "snappy": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "snappy",
                    "release": "8.el9",
                    "source": "rpm",
                    "version": "1.1.8"
                }
            ],
            "sqlite-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "sqlite-libs",
                    "release": "7.el9",
                    "source": "rpm",
                    "version": "3.34.1"
                }
            ],
            "squashfs-tools": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "squashfs-tools",
                    "release": "10.git1.el9",
                    "source": "rpm",
                    "version": "4.4"
                }
            ],
            "sssd-client": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "sssd-client",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "2.9.5"
                }
            ],
            "sssd-common": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "sssd-common",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "2.9.5"
                }
            ],
            "sssd-kcm": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "sssd-kcm",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "2.9.5"
                }
            ],
            "sssd-nfs-idmap": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "sssd-nfs-idmap",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "2.9.5"
                }
            ],
            "strace": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "strace",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "5.18"
                }
            ],
            "stratis-cli": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "stratis-cli",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "3.7.0"
                }
            ],
            "stratisd": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "stratisd",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "3.7.3"
                }
            ],
            "sudo": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "sudo",
                    "release": "10.el9",
                    "source": "rpm",
                    "version": "1.9.5p2"
                }
            ],
            "systemd": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "systemd",
                    "release": "48.el9",
                    "source": "rpm",
                    "version": "252"
                }
            ],
            "systemd-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "systemd-libs",
                    "release": "48.el9",
                    "source": "rpm",
                    "version": "252"
                }
            ],
            "systemd-pam": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "systemd-pam",
                    "release": "48.el9",
                    "source": "rpm",
                    "version": "252"
                }
            ],
            "systemd-rpm-macros": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "systemd-rpm-macros",
                    "release": "48.el9",
                    "source": "rpm",
                    "version": "252"
                }
            ],
            "systemd-udev": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "systemd-udev",
                    "release": "48.el9",
                    "source": "rpm",
                    "version": "252"
                }
            ],
            "systemtap": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "systemtap",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "5.1"
                }
            ],
            "systemtap-client": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "systemtap-client",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "5.1"
                }
            ],
            "systemtap-devel": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "systemtap-devel",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "5.1"
                }
            ],
            "systemtap-runtime": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "systemtap-runtime",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "5.1"
                }
            ],
            "tar": [
                {
                    "arch": "x86_64",
                    "epoch": 2,
                    "name": "tar",
                    "release": "7.el9",
                    "source": "rpm",
                    "version": "1.34"
                }
            ],
            "target-restore": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "target-restore",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "2.1.76"
                }
            ],
            "targetcli": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "targetcli",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "2.1.57"
                }
            ],
            "tbb": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "tbb",
                    "release": "8.el9",
                    "source": "rpm",
                    "version": "2020.3"
                }
            ],
            "teamd": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "teamd",
                    "release": "16.el9",
                    "source": "rpm",
                    "version": "1.31"
                }
            ],
            "time": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "time",
                    "release": "18.el9",
                    "source": "rpm",
                    "version": "1.9"
                }
            ],
            "tpm2-tools": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "tpm2-tools",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "5.2"
                }
            ],
            "tpm2-tss": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "tpm2-tss",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "3.2.3"
                }
            ],
            "tzdata": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "tzdata",
                    "release": "2.el9",
                    "source": "rpm",
                    "version": "2024b"
                }
            ],
            "unzip": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "unzip",
                    "release": "57.el9",
                    "source": "rpm",
                    "version": "6.0"
                }
            ],
            "userspace-rcu": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "userspace-rcu",
                    "release": "6.el9",
                    "source": "rpm",
                    "version": "0.12.1"
                }
            ],
            "util-linux": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "util-linux",
                    "release": "20.el9",
                    "source": "rpm",
                    "version": "2.37.4"
                }
            ],
            "util-linux-core": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "util-linux-core",
                    "release": "20.el9",
                    "source": "rpm",
                    "version": "2.37.4"
                }
            ],
            "vdo": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "vdo",
                    "release": "1.el9",
                    "source": "rpm",
                    "version": "8.2.2.2"
                }
            ],
            "vim-common": [
                {
                    "arch": "x86_64",
                    "epoch": 2,
                    "name": "vim-common",
                    "release": "21.el9",
                    "source": "rpm",
                    "version": "8.2.2637"
                }
            ],
            "vim-enhanced": [
                {
                    "arch": "x86_64",
                    "epoch": 2,
                    "name": "vim-enhanced",
                    "release": "21.el9",
                    "source": "rpm",
                    "version": "8.2.2637"
                }
            ],
            "vim-filesystem": [
                {
                    "arch": "noarch",
                    "epoch": 2,
                    "name": "vim-filesystem",
                    "release": "21.el9",
                    "source": "rpm",
                    "version": "8.2.2637"
                }
            ],
            "vim-minimal": [
                {
                    "arch": "x86_64",
                    "epoch": 2,
                    "name": "vim-minimal",
                    "release": "21.el9",
                    "source": "rpm",
                    "version": "8.2.2637"
                }
            ],
            "volume_key-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "volume_key-libs",
                    "release": "15.el9",
                    "source": "rpm",
                    "version": "0.3.12"
                }
            ],
            "wget": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "wget",
                    "release": "8.el9",
                    "source": "rpm",
                    "version": "1.21.1"
                }
            ],
            "which": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "which",
                    "release": "29.el9",
                    "source": "rpm",
                    "version": "2.21"
                }
            ],
            "xfsprogs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "xfsprogs",
                    "release": "4.el9",
                    "source": "rpm",
                    "version": "6.4.0"
                }
            ],
            "xz": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "xz",
                    "release": "8.el9",
                    "source": "rpm",
                    "version": "5.2.5"
                }
            ],
            "xz-devel": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "xz-devel",
                    "release": "8.el9",
                    "source": "rpm",
                    "version": "5.2.5"
                }
            ],
            "xz-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "xz-libs",
                    "release": "8.el9",
                    "source": "rpm",
                    "version": "5.2.5"
                }
            ],
            "yum": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "yum",
                    "release": "21.el9",
                    "source": "rpm",
                    "version": "4.14.0"
                }
            ],
            "yum-utils": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "yum-utils",
                    "release": "17.el9",
                    "source": "rpm",
                    "version": "4.3.0"
                }
            ],
            "zip": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "zip",
                    "release": "35.el9",
                    "source": "rpm",
                    "version": "3.0"
                }
            ],
            "zlib": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "zlib",
                    "release": "41.el9",
                    "source": "rpm",
                    "version": "1.2.11"
                }
            ],
            "zlib-devel": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "zlib-devel",
                    "release": "41.el9",
                    "source": "rpm",
                    "version": "1.2.11"
                }
            ]
        }
    },
    "changed": false
}

TASK [Set blivet package name] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:28
Saturday 02 November 2024  18:53:41 -0400 (0:00:01.359)       0:00:12.434 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "blivet_pkg_name": [
            "python3-blivet"
        ]
    },
    "changed": false
}

TASK [Set blivet package version] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:32
Saturday 02 November 2024  18:53:41 -0400 (0:00:00.130)       0:00:12.565 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "blivet_pkg_version": "3.6.0-19.el9"
    },
    "changed": false
}
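
The two facts above are derived from the package facts gathered earlier in the run. A minimal sketch of how such tasks could look — the variable names blivet_pkg_name and blivet_pkg_version come from the log, but the exact expressions in tests_stratis.yml are an assumption:

    # Sketch, not the verbatim test tasks: pick the blivet package out of the
    # role's package list, then derive its version-release string from the
    # ansible_facts.packages structure shown above (name/version/release keys).
    - name: Set blivet package name
      set_fact:
        blivet_pkg_name: "{{ blivet_package_list | select('search', 'blivet') | list }}"

    - name: Set blivet package version
      set_fact:
        blivet_pkg_version: >-
          {{ ansible_facts.packages[blivet_pkg_name[0]][0].version ~ '-' ~
             ansible_facts.packages[blivet_pkg_name[0]][0].release }}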

TASK [Set distribution version] ************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:36
Saturday 02 November 2024  18:53:41 -0400 (0:00:00.113)       0:00:12.678 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "is_fedora": false,
        "is_rhel10": false,
        "is_rhel78": false,
        "is_rhel9": true
    },
    "changed": false
}
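
A sketch of how these boolean flags could be computed, assuming they come from the standard distribution facts; the actual conditions in tests_stratis.yml may differ:

    # Illustrative only: derive the distribution flags logged above
    # (is_rhel9 is true here because the node is a CentOS Stream 9 box).
    - name: Set distribution version
      set_fact:
        is_fedora: "{{ ansible_facts.distribution == 'Fedora' }}"
        is_rhel10: "{{ ansible_facts.distribution_major_version | d('') == '10' }}"
        is_rhel78: "{{ ansible_facts.distribution_major_version | d('') in ['7', '8'] }}"
        is_rhel9: "{{ ansible_facts.distribution_major_version | d('') == '9' }}"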

TASK [Get unused disks] ********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:47
Saturday 02 November 2024  18:53:41 -0400 (0:00:00.219)       0:00:12.898 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml for managed-node2

TASK [Ensure test packages] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:2
Saturday 02 November 2024  18:53:42 -0400 (0:00:00.115)       0:00:13.014 ***** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: util-linux-core

TASK [Find unused disks in the system] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:11
Saturday 02 November 2024  18:53:43 -0400 (0:00:01.354)       0:00:14.369 ***** 
ok: [managed-node2] => {
    "changed": false,
    "disks": [
        "sda",
        "sdb",
        "sdc",
        "sdd",
        "sde",
        "sdf",
        "sdg",
        "sdh",
        "sdi"
    ],
    "info": [
        "Line: NAME=\"/dev/sda\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG_SEC=\"512\"",
        "Line: NAME=\"/dev/sdb\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG_SEC=\"512\"",
        "Line: NAME=\"/dev/sdc\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG_SEC=\"512\"",
        "Line: NAME=\"/dev/sdd\" TYPE=\"disk\" SIZE=\"1099511627776\" FSTYPE=\"\" LOG_SEC=\"512\"",
        "Line: NAME=\"/dev/sde\" TYPE=\"disk\" SIZE=\"1099511627776\" FSTYPE=\"\" LOG_SEC=\"512\"",
        "Line: NAME=\"/dev/sdf\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG_SEC=\"512\"",
        "Line: NAME=\"/dev/sdg\" TYPE=\"disk\" SIZE=\"1099511627776\" FSTYPE=\"\" LOG_SEC=\"512\"",
        "Line: NAME=\"/dev/sdh\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG_SEC=\"512\"",
        "Line: NAME=\"/dev/sdi\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG_SEC=\"512\"",
        "Line: NAME=\"/dev/xvda\" TYPE=\"disk\" SIZE=\"268435456000\" FSTYPE=\"\" LOG_SEC=\"512\"",
        "Line: NAME=\"/dev/xvda1\" TYPE=\"part\" SIZE=\"268434390528\" FSTYPE=\"xfs\" LOG_SEC=\"512\"",
        "Line type [part] is not disk: NAME=\"/dev/xvda1\" TYPE=\"part\" SIZE=\"268434390528\" FSTYPE=\"xfs\" LOG_SEC=\"512\"",
        "filename [xvda1] is a partition",
        "Disk [/dev/xvda] attrs [{'type': 'disk', 'size': '268435456000', 'fstype': '', 'ssize': '512'}] has partitions"
    ]
}
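
The Line: entries above are lsblk key="value" pairs echoed back by the test helper. A rough equivalent of the scan, expressed as a task — the exact invocation inside get_unused_disk.yml is an assumption, but the lsblk flags are standard (-p full device paths, -b sizes in bytes, -P key="value" pair output):

    - name: Reproduce the unused-disk scan by hand (illustrative only)
      command: lsblk -p -b -P -o NAME,TYPE,SIZE,FSTYPE,LOG-SEC
      register: lsblk_out
      changed_when: false

Devices with no FSTYPE and no partitions are treated as unused, which is why /dev/xvda (holding the xfs-formatted /dev/xvda1) is rejected above while sda through sdi are kept.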

TASK [Debug why there are no unused disks] *************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:20
Saturday 02 November 2024  18:53:45 -0400 (0:00:01.619)       0:00:15.988 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "'Unable to find unused disk' in unused_disks_return.disks",
    "skip_reason": "Conditional result was False"
}

TASK [Set unused_disks if necessary] *******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:29
Saturday 02 November 2024  18:53:45 -0400 (0:00:00.082)       0:00:16.071 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "unused_disks": [
            "sda",
            "sdb",
            "sdc",
            "sdd",
            "sde",
            "sdf",
            "sdg",
            "sdh",
            "sdi"
        ]
    },
    "changed": false
}

TASK [Exit playbook when there aren't enough unused disks in the system] *******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:34
Saturday 02 November 2024  18:53:45 -0400 (0:00:00.109)       0:00:16.180 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "unused_disks | d([]) | length < disks_needed | d(1)",
    "skip_reason": "Conditional result was False"
}

TASK [Print unused disks] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:39
Saturday 02 November 2024  18:53:45 -0400 (0:00:00.116)       0:00:16.297 ***** 
ok: [managed-node2] => {
    "unused_disks": [
        "sda",
        "sdb",
        "sdc",
        "sdd",
        "sde",
        "sdf",
        "sdg",
        "sdh",
        "sdi"
    ]
}

TASK [Start stratisd service] **************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:55
Saturday 02 November 2024  18:53:45 -0400 (0:00:00.088)       0:00:16.385 ***** 
changed: [managed-node2] => {
    "changed": true,
    "name": "stratisd",
    "state": "started",
    "status": {
        "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0",
        "ActiveEnterTimestampMonotonic": "0",
        "ActiveExitTimestampMonotonic": "0",
        "ActiveState": "inactive",
        "After": "systemd-journald.socket dbus.socket system.slice local-fs.target",
        "AllowIsolate": "no",
        "AssertResult": "no",
        "AssertTimestampMonotonic": "0",
        "BlockIOAccounting": "no",
        "BlockIOWeight": "[not set]",
        "BusName": "org.storage.stratis3",
        "CPUAccounting": "yes",
        "CPUAffinityFromNUMA": "no",
        "CPUQuotaPerSecUSec": "infinity",
        "CPUQuotaPeriodUSec": "infinity",
        "CPUSchedulingPolicy": "0",
        "CPUSchedulingPriority": "0",
        "CPUSchedulingResetOnFork": "no",
        "CPUShares": "[not set]",
        "CPUUsageNSec": "[not set]",
        "CPUWeight": "[not set]",
        "CacheDirectoryMode": "0755",
        "CanFreeze": "yes",
        "CanIsolate": "no",
        "CanReload": "no",
        "CanStart": "yes",
        "CanStop": "yes",
        "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
        "CleanResult": "success",
        "CollectMode": "inactive",
        "ConditionResult": "no",
        "ConditionTimestampMonotonic": "0",
        "ConfigurationDirectoryMode": "0755",
        "ControlGroupId": "0",
        "ControlPID": "0",
        "CoredumpFilter": "0x33",
        "DefaultDependencies": "no",
        "DefaultMemoryLow": "0",
        "DefaultMemoryMin": "0",
        "Delegate": "no",
        "Description": "Stratis daemon",
        "DevicePolicy": "auto",
        "Documentation": "\"man:stratisd(8)\"",
        "DynamicUser": "no",
        "Environment": "RUST_BACKTRACE=1",
        "ExecMainCode": "0",
        "ExecMainExitTimestampMonotonic": "0",
        "ExecMainPID": "0",
        "ExecMainStartTimestampMonotonic": "0",
        "ExecMainStatus": "0",
        "ExecStart": "{ path=/usr/libexec/stratisd ; argv[]=/usr/libexec/stratisd --log-level debug ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecStartEx": "{ path=/usr/libexec/stratisd ; argv[]=/usr/libexec/stratisd --log-level debug ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExitType": "main",
        "FailureAction": "none",
        "FileDescriptorStoreMax": "0",
        "FinalKillSignal": "9",
        "FragmentPath": "/usr/lib/systemd/system/stratisd.service",
        "FreezerState": "running",
        "GID": "[not set]",
        "GuessMainPID": "yes",
        "IOAccounting": "no",
        "IOReadBytes": "18446744073709551615",
        "IOReadOperations": "18446744073709551615",
        "IOSchedulingClass": "2",
        "IOSchedulingPriority": "4",
        "IOWeight": "[not set]",
        "IOWriteBytes": "18446744073709551615",
        "IOWriteOperations": "18446744073709551615",
        "IPAccounting": "no",
        "IPEgressBytes": "[no data]",
        "IPEgressPackets": "[no data]",
        "IPIngressBytes": "[no data]",
        "IPIngressPackets": "[no data]",
        "Id": "stratisd.service",
        "IgnoreOnIsolate": "no",
        "IgnoreSIGPIPE": "yes",
        "InactiveEnterTimestampMonotonic": "0",
        "InactiveExitTimestampMonotonic": "0",
        "JobRunningTimeoutUSec": "infinity",
        "JobTimeoutAction": "none",
        "JobTimeoutUSec": "infinity",
        "KeyringMode": "private",
        "KillMode": "process",
        "KillSignal": "2",
        "LimitAS": "infinity",
        "LimitASSoft": "infinity",
        "LimitCORE": "infinity",
        "LimitCORESoft": "infinity",
        "LimitCPU": "infinity",
        "LimitCPUSoft": "infinity",
        "LimitDATA": "infinity",
        "LimitDATASoft": "infinity",
        "LimitFSIZE": "infinity",
        "LimitFSIZESoft": "infinity",
        "LimitLOCKS": "infinity",
        "LimitLOCKSSoft": "infinity",
        "LimitMEMLOCK": "8388608",
        "LimitMEMLOCKSoft": "8388608",
        "LimitMSGQUEUE": "819200",
        "LimitMSGQUEUESoft": "819200",
        "LimitNICE": "0",
        "LimitNICESoft": "0",
        "LimitNOFILE": "524288",
        "LimitNOFILESoft": "1024",
        "LimitNPROC": "13955",
        "LimitNPROCSoft": "13955",
        "LimitRSS": "infinity",
        "LimitRSSSoft": "infinity",
        "LimitRTPRIO": "0",
        "LimitRTPRIOSoft": "0",
        "LimitRTTIME": "infinity",
        "LimitRTTIMESoft": "infinity",
        "LimitSIGPENDING": "13955",
        "LimitSIGPENDINGSoft": "13955",
        "LimitSTACK": "infinity",
        "LimitSTACKSoft": "8388608",
        "LoadState": "loaded",
        "LockPersonality": "no",
        "LogLevelMax": "-1",
        "LogRateLimitBurst": "0",
        "LogRateLimitIntervalUSec": "0",
        "LogsDirectoryMode": "0755",
        "MainPID": "0",
        "ManagedOOMMemoryPressure": "auto",
        "ManagedOOMMemoryPressureLimit": "0",
        "ManagedOOMPreference": "none",
        "ManagedOOMSwap": "auto",
        "MemoryAccounting": "yes",
        "MemoryAvailable": "infinity",
        "MemoryCurrent": "[not set]",
        "MemoryDenyWriteExecute": "no",
        "MemoryHigh": "infinity",
        "MemoryLimit": "infinity",
        "MemoryLow": "0",
        "MemoryMax": "infinity",
        "MemoryMin": "0",
        "MemorySwapMax": "infinity",
        "MountAPIVFS": "no",
        "NFileDescriptorStore": "0",
        "NRestarts": "0",
        "NUMAPolicy": "n/a",
        "Names": "stratisd.service",
        "NeedDaemonReload": "no",
        "Nice": "0",
        "NoNewPrivileges": "no",
        "NonBlocking": "no",
        "NotifyAccess": "none",
        "OOMPolicy": "stop",
        "OOMScoreAdjust": "0",
        "OnFailureJobMode": "replace",
        "OnSuccessJobMode": "fail",
        "Perpetual": "no",
        "PrivateDevices": "no",
        "PrivateIPC": "no",
        "PrivateMounts": "no",
        "PrivateNetwork": "no",
        "PrivateTmp": "no",
        "PrivateUsers": "no",
        "ProcSubset": "all",
        "ProtectClock": "no",
        "ProtectControlGroups": "no",
        "ProtectHome": "no",
        "ProtectHostname": "no",
        "ProtectKernelLogs": "no",
        "ProtectKernelModules": "no",
        "ProtectKernelTunables": "no",
        "ProtectProc": "default",
        "ProtectSystem": "no",
        "RefuseManualStart": "no",
        "RefuseManualStop": "no",
        "ReloadResult": "success",
        "ReloadSignal": "1",
        "RemainAfterExit": "no",
        "RemoveIPC": "no",
        "Requires": "system.slice dbus.socket",
        "Restart": "on-abort",
        "RestartKillSignal": "2",
        "RestartUSec": "100ms",
        "RestrictNamespaces": "no",
        "RestrictRealtime": "no",
        "RestrictSUIDSGID": "no",
        "Result": "success",
        "RootDirectoryStartOnly": "no",
        "RuntimeDirectoryMode": "0755",
        "RuntimeDirectoryPreserve": "no",
        "RuntimeMaxUSec": "infinity",
        "RuntimeRandomizedExtraUSec": "0",
        "SameProcessGroup": "no",
        "SecureBits": "0",
        "SendSIGHUP": "no",
        "SendSIGKILL": "yes",
        "Slice": "system.slice",
        "StandardError": "inherit",
        "StandardInput": "null",
        "StandardOutput": "journal",
        "StartLimitAction": "none",
        "StartLimitBurst": "5",
        "StartLimitIntervalUSec": "10s",
        "StartupBlockIOWeight": "[not set]",
        "StartupCPUShares": "[not set]",
        "StartupCPUWeight": "[not set]",
        "StartupIOWeight": "[not set]",
        "StateChangeTimestamp": "Sat 2024-11-02 18:26:23 EDT",
        "StateChangeTimestampMonotonic": "417486443",
        "StateDirectoryMode": "0755",
        "StatusErrno": "0",
        "StopWhenUnneeded": "no",
        "SubState": "dead",
        "SuccessAction": "none",
        "SyslogFacility": "3",
        "SyslogLevel": "6",
        "SyslogLevelPrefix": "yes",
        "SyslogPriority": "30",
        "SystemCallErrorNumber": "2147483646",
        "TTYReset": "no",
        "TTYVHangup": "no",
        "TTYVTDisallocate": "no",
        "TasksAccounting": "yes",
        "TasksCurrent": "[not set]",
        "TasksMax": "22328",
        "TimeoutAbortUSec": "1min 30s",
        "TimeoutCleanUSec": "infinity",
        "TimeoutStartFailureMode": "terminate",
        "TimeoutStartUSec": "1min 30s",
        "TimeoutStopFailureMode": "terminate",
        "TimeoutStopUSec": "1min 30s",
        "TimerSlackNSec": "50000",
        "Transient": "no",
        "Type": "dbus",
        "UID": "[not set]",
        "UMask": "0022",
        "UnitFilePreset": "enabled",
        "UnitFileState": "enabled",
        "UtmpMode": "init",
        "WantedBy": "multi-user.target",
        "WatchdogSignal": "6",
        "WatchdogTimestampMonotonic": "0",
        "WatchdogUSec": "infinity"
    }
}
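
The change above corresponds to an ordinary service task; a sketch consistent with the name and state reported in the result:

    - name: Start stratisd service
      service:
        name: stratisd
        state: started

Because the unit is Type=dbus with BusName org.storage.stratis3 (see the status dump), systemd considers the start complete only once the daemon has claimed that bus name.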

TASK [Create one Stratis pool with one volume] *********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:60
Saturday 02 November 2024  18:53:46 -0400 (0:00:01.004)       0:00:17.389 ***** 
included: fedora.linux_system_roles.storage for managed-node2

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Saturday 02 November 2024  18:53:46 -0400 (0:00:00.093)       0:00:17.483 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Saturday 02 November 2024  18:53:46 -0400 (0:00:00.100)       0:00:17.583 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Saturday 02 November 2024  18:53:46 -0400 (0:00:00.183)       0:00:17.767 ***** 
skipping: [managed-node2] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node2] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}
ok: [managed-node2] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}
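
The repeated item is expected: CentOS_9.yml matches twice in the candidate vars-file list, once for the major version and once for the full distribution version. The included file pins the package set for this platform; the reconstruction below simply mirrors the logged blivet_package_list and should be read as a view of the log, not a verified copy of the file:

    # roles/storage/vars/CentOS_9.yml as reflected in the output above
    blivet_package_list:
      - python3-blivet
      - libblockdev-crypto
      - libblockdev-dm
      - libblockdev-lvm
      - libblockdev-mdraid
      - libblockdev-swap
      - vdo
      - kmod-kvdo
      - xfsprogs
      - stratisd
      - stratis-cli
      - "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"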

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Saturday 02 November 2024  18:53:47 -0400 (0:00:00.136)       0:00:17.904 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Saturday 02 November 2024  18:53:47 -0400 (0:00:00.072)       0:00:17.976 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Saturday 02 November 2024  18:53:47 -0400 (0:00:00.047)       0:00:18.023 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Saturday 02 November 2024  18:53:47 -0400 (0:00:00.039)       0:00:18.062 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Saturday 02 November 2024  18:53:47 -0400 (0:00:00.039)       0:00:18.102 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Saturday 02 November 2024  18:53:47 -0400 (0:00:00.077)       0:00:18.179 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Saturday 02 November 2024  18:53:47 -0400 (0:00:00.041)       0:00:18.221 ***** 
ok: [managed-node2] => {
    "storage_pools": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "name": "foo",
            "type": "stratis",
            "volumes": [
                {
                    "mount_point": "/opt/test1",
                    "name": "test1",
                    "size": "4g"
                }
            ]
        }
    ]
}
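
The pool specification above is what the test hands to the role. A sketch of an equivalent invocation, assuming the include_role pattern these tests normally use (the values are taken from the logged storage_pools):

    - name: Create one Stratis pool with one volume
      include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            type: stratis
            disks: "{{ unused_disks }}"
            volumes:
              - name: test1
                size: 4g
                mount_point: /opt/test1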

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Saturday 02 November 2024  18:53:47 -0400 (0:00:00.042)       0:00:18.263 ***** 
ok: [managed-node2] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Saturday 02 November 2024  18:53:47 -0400 (0:00:00.039)       0:00:18.303 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Saturday 02 November 2024  18:53:47 -0400 (0:00:00.055)       0:00:18.359 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Saturday 02 November 2024  18:53:47 -0400 (0:00:00.067)       0:00:18.427 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Saturday 02 November 2024  18:53:47 -0400 (0:00:00.065)       0:00:18.493 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Saturday 02 November 2024  18:53:47 -0400 (0:00:00.077)       0:00:18.570 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Saturday 02 November 2024  18:53:47 -0400 (0:00:00.252)       0:00:18.823 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Saturday 02 November 2024  18:53:47 -0400 (0:00:00.043)       0:00:18.866 ***** 
changed: [managed-node2] => {
    "actions": [
        {
            "action": "create format",
            "device": "/dev/sdi",
            "fs_type": "stratis"
        },
        {
            "action": "create format",
            "device": "/dev/sdh",
            "fs_type": "stratis"
        },
        {
            "action": "create format",
            "device": "/dev/sdg",
            "fs_type": "stratis"
        },
        {
            "action": "create format",
            "device": "/dev/sdf",
            "fs_type": "stratis"
        },
        {
            "action": "create format",
            "device": "/dev/sde",
            "fs_type": "stratis"
        },
        {
            "action": "create format",
            "device": "/dev/sdd",
            "fs_type": "stratis"
        },
        {
            "action": "create format",
            "device": "/dev/sdc",
            "fs_type": "stratis"
        },
        {
            "action": "create format",
            "device": "/dev/sdb",
            "fs_type": "stratis"
        },
        {
            "action": "create format",
            "device": "/dev/sda",
            "fs_type": "stratis"
        },
        {
            "action": "create device",
            "device": "/dev/stratis/foo",
            "fs_type": null
        },
        {
            "action": "create device",
            "device": "/dev/stratis/foo/test1",
            "fs_type": null
        },
        {
            "action": "create format",
            "device": "/dev/stratis/foo/test1",
            "fs_type": "stratis xfs"
        }
    ],
    "changed": true,
    "crypts": [],
    "leaves": [
        "/dev/xvda1",
        "/dev/stratis/foo/test1"
    ],
    "mounts": [
        {
            "dump": 0,
            "fstype": "xfs",
            "group": null,
            "mode": null,
            "opts": "defaults",
            "owner": null,
            "passno": 0,
            "path": "/opt/test1",
            "src": "UUID=b062ee93-ef86-4262-a663-741cc15318f4",
            "state": "mounted"
        }
    ],
    "packages": [
        "stratisd",
        "xfsprogs",
        "stratis-cli"
    ],
    "pools": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "stratis",
            "volumes": [
                {
                    "_device": "/dev/stratis/foo/test1",
                    "_kernel_device": "/dev/dm-5",
                    "_mount_id": "UUID=b062ee93-ef86-4262-a663-741cc15318f4",
                    "_raw_device": "/dev/stratis/foo/test1",
                    "_raw_kernel_device": "/dev/dm-5",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "stratis",
                    "vdo_pool_size": null
                }
            ]
        }
    ],
    "volumes": []
}

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Saturday 02 November 2024  18:53:58 -0400 (0:00:10.472)       0:00:29.338 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Saturday 02 November 2024  18:53:58 -0400 (0:00:00.055)       0:00:29.394 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730587978.0113251,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "2b06b132c9b1f74ec4dca585656a9f294c78ba1c",
        "ctime": 1730587977.4203188,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 415236291,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1730587977.4203188,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1366,
        "uid": 0,
        "version": "3857707169",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Saturday 02 November 2024  18:53:58 -0400 (0:00:00.395)       0:00:29.790 ***** 
ok: [managed-node2] => {
    "backup": "",
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Saturday 02 November 2024  18:53:59 -0400 (0:00:00.495)       0:00:30.285 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Saturday 02 November 2024  18:53:59 -0400 (0:00:00.049)       0:00:30.335 ***** 
ok: [managed-node2] => {
    "blivet_output": {
        "actions": [
            {
                "action": "create format",
                "device": "/dev/sdi",
                "fs_type": "stratis"
            },
            {
                "action": "create format",
                "device": "/dev/sdh",
                "fs_type": "stratis"
            },
            {
                "action": "create format",
                "device": "/dev/sdg",
                "fs_type": "stratis"
            },
            {
                "action": "create format",
                "device": "/dev/sdf",
                "fs_type": "stratis"
            },
            {
                "action": "create format",
                "device": "/dev/sde",
                "fs_type": "stratis"
            },
            {
                "action": "create format",
                "device": "/dev/sdd",
                "fs_type": "stratis"
            },
            {
                "action": "create format",
                "device": "/dev/sdc",
                "fs_type": "stratis"
            },
            {
                "action": "create format",
                "device": "/dev/sdb",
                "fs_type": "stratis"
            },
            {
                "action": "create format",
                "device": "/dev/sda",
                "fs_type": "stratis"
            },
            {
                "action": "create device",
                "device": "/dev/stratis/foo",
                "fs_type": null
            },
            {
                "action": "create device",
                "device": "/dev/stratis/foo/test1",
                "fs_type": null
            },
            {
                "action": "create format",
                "device": "/dev/stratis/foo/test1",
                "fs_type": "stratis xfs"
            }
        ],
        "changed": true,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/xvda1",
            "/dev/stratis/foo/test1"
        ],
        "mounts": [
            {
                "dump": 0,
                "fstype": "xfs",
                "group": null,
                "mode": null,
                "opts": "defaults",
                "owner": null,
                "passno": 0,
                "path": "/opt/test1",
                "src": "UUID=b062ee93-ef86-4262-a663-741cc15318f4",
                "state": "mounted"
            }
        ],
        "packages": [
            "stratisd",
            "xfsprogs",
            "stratis-cli"
        ],
        "pools": [
            {
                "disks": [
                    "sda",
                    "sdb",
                    "sdc",
                    "sdd",
                    "sde",
                    "sdf",
                    "sdg",
                    "sdh",
                    "sdi"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "stratis",
                "volumes": [
                    {
                        "_device": "/dev/stratis/foo/test1",
                        "_kernel_device": "/dev/dm-5",
                        "_mount_id": "UUID=b062ee93-ef86-4262-a663-741cc15318f4",
                        "_raw_device": "/dev/stratis/foo/test1",
                        "_raw_kernel_device": "/dev/dm-5",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "stratis",
                        "vdo_pool_size": null
                    }
                ]
            }
        ],
        "volumes": []
    }
}
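
For orientation, the blivet_output above maps directly onto the role's storage_pools input. A minimal sketch of an invocation that would yield this result (pool name, disk list, and volume parameters are read back from the logged output; this is not a copy of the actual tests_stratis.yml source):

    - name: Create a Stratis pool with a single XFS filesystem
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            type: stratis
            disks: [sda, sdb, sdc, sdd, sde, sdf, sdg, sdh, sdi]
            volumes:
              - name: test1
                size: 4g
                fs_type: xfs
                mount_point: /opt/test1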

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Saturday 02 November 2024  18:53:59 -0400 (0:00:00.080)       0:00:30.416 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda",
                    "sdb",
                    "sdc",
                    "sdd",
                    "sde",
                    "sdf",
                    "sdg",
                    "sdh",
                    "sdi"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "stratis",
                "volumes": [
                    {
                        "_device": "/dev/stratis/foo/test1",
                        "_kernel_device": "/dev/dm-5",
                        "_mount_id": "UUID=b062ee93-ef86-4262-a663-741cc15318f4",
                        "_raw_device": "/dev/stratis/foo/test1",
                        "_raw_kernel_device": "/dev/dm-5",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "stratis",
                        "vdo_pool_size": null
                    }
                ]
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Saturday 02 November 2024  18:53:59 -0400 (0:00:00.070)       0:00:30.487 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Saturday 02 November 2024  18:53:59 -0400 (0:00:00.068)       0:00:30.555 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Saturday 02 November 2024  18:53:59 -0400 (0:00:00.094)       0:00:30.649 ***** 
ok: [managed-node2] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Saturday 02 November 2024  18:54:00 -0400 (0:00:00.784)       0:00:31.434 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
changed: [managed-node2] => (item={'src': 'UUID=b062ee93-ef86-4262-a663-741cc15318f4', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "xfs",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "UUID=b062ee93-ef86-4262-a663-741cc15318f4",
        "state": "mounted"
    },
    "name": "/opt/test1",
    "opts": "defaults",
    "passno": "0",
    "src": "UUID=b062ee93-ef86-4262-a663-741cc15318f4"
}
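
The "redirecting (type: modules)" lines above show ansible.builtin.mount resolving to ansible.posix.mount. A standalone task equivalent to the logged loop item would look roughly like this (the UUID is copied from the log and is illustrative only):

    - name: Mount the Stratis filesystem on /opt/test1
      ansible.posix.mount:
        src: UUID=b062ee93-ef86-4262-a663-741cc15318f4
        path: /opt/test1
        fstype: xfs
        opts: defaults
        state: mounted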

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Saturday 02 November 2024  18:54:01 -0400 (0:00:00.690)       0:00:32.124 ***** 
skipping: [managed-node2] => (item={'src': 'UUID=b062ee93-ef86-4262-a663-741cc15318f4', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None})  => {
    "ansible_loop_var": "mount_info",
    "changed": false,
    "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "UUID=b062ee93-ef86-4262-a663-741cc15318f4",
        "state": "mounted"
    },
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Saturday 02 November 2024  18:54:01 -0400 (0:00:00.128)       0:00:32.253 ***** 
ok: [managed-node2] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Saturday 02 November 2024  18:54:02 -0400 (0:00:00.797)       0:00:33.050 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730587881.7952998,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1730587879.2202723,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 104857860,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1730587879.2212098,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "3381617552",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
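
The reported checksum da39a3ee5e6b4b0d3255bfef95601890afd80709 is the SHA-1 digest of zero bytes, which agrees with "size": 0 above: /etc/crypttab exists but is empty. A quick way to confirm the digest (a sketch, runnable anywhere):

    printf '' | sha1sum
    # da39a3ee5e6b4b0d3255bfef95601890afd80709  -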

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Saturday 02 November 2024  18:54:02 -0400 (0:00:00.385)       0:00:33.436 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Saturday 02 November 2024  18:54:02 -0400 (0:00:00.041)       0:00:33.477 ***** 
ok: [managed-node2]

TASK [Verify role results] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:73
Saturday 02 November 2024  18:54:03 -0400 (0:00:00.968)       0:00:34.445 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node2

TASK [Print out pool information] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Saturday 02 November 2024  18:54:03 -0400 (0:00:00.127)       0:00:34.572 ***** 
ok: [managed-node2] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "stratis",
            "volumes": [
                {
                    "_device": "/dev/stratis/foo/test1",
                    "_kernel_device": "/dev/dm-5",
                    "_mount_id": "UUID=b062ee93-ef86-4262-a663-741cc15318f4",
                    "_raw_device": "/dev/stratis/foo/test1",
                    "_raw_kernel_device": "/dev/dm-5",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "stratis",
                    "vdo_pool_size": null
                }
            ]
        }
    ]
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Saturday 02 November 2024  18:54:03 -0400 (0:00:00.103)       0:00:34.676 ***** 
skipping: [managed-node2] => {
    "false_condition": "_storage_volumes_list | length > 0"
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Saturday 02 November 2024  18:54:03 -0400 (0:00:00.094)       0:00:34.770 ***** 
ok: [managed-node2] => {
    "changed": false,
    "info": {
        "/dev/mapper/stratis-1-private-d223f04f546248b69daf0bcf2988f0df-flex-mdv": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-d223f04f546248b69daf0bcf2988f0df-flex-mdv",
            "size": "512M",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-d223f04f546248b69daf0bcf2988f0df-flex-thindata": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-d223f04f546248b69daf0bcf2988f0df-flex-thindata",
            "size": "50G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-d223f04f546248b69daf0bcf2988f0df-flex-thinmeta": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-d223f04f546248b69daf0bcf2988f0df-flex-thinmeta",
            "size": "799M",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-d223f04f546248b69daf0bcf2988f0df-physical-originsub": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-d223f04f546248b69daf0bcf2988f0df-physical-originsub",
            "size": "52.1G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-d223f04f546248b69daf0bcf2988f0df-thinpool-pool": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-d223f04f546248b69daf0bcf2988f0df-thinpool-pool",
            "size": "50G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/sda": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": "e413a7fc-117d-4574-a376-73891db18f22"
        },
        "/dev/sdb": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": "0e740f57-98bb-4cc4-a6de-baf7011f8b18"
        },
        "/dev/sdc": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": "40d11307-b089-4ffd-a2e4-2956ca95ef98"
        },
        "/dev/sdd": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": "021b72bd-8ac1-4dd1-97a9-b699ed067e27"
        },
        "/dev/sde": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": "47c2037b-8a0b-4d3b-bda2-e60716c8c9b6"
        },
        "/dev/sdf": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": "c7bfaf35-4aa1-4246-a94c-a315f4f163b7"
        },
        "/dev/sdg": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": "79a07dc2-d906-4072-81b5-e3e42d9d46cf"
        },
        "/dev/sdh": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": "9c3208a9-9d54-4146-89bd-8b5057ae32f2"
        },
        "/dev/sdi": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": "2894f483-d240-4a62-859a-686b8ccad4ec"
        },
        "/dev/stratis/foo/test1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/opt/test1",
            "name": "/dev/stratis/foo/test1",
            "size": "4G",
            "type": "stratis",
            "uuid": "b062ee93-ef86-4262-a663-741cc15318f4"
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda1",
            "size": "250G",
            "type": "partition",
            "uuid": "d6f52a16-d77d-4c0d-9841-76b856654401"
        }
    }
}
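
The per-device map above (fstype, label, mountpoint, name, size, type, uuid) matches the columns lsblk can report. An approximate manual equivalent on the managed node (a sketch; note that plain lsblk may classify the device-mapper "type" differently than the test helper does):

    lsblk -p -o NAME,FSTYPE,LABEL,MOUNTPOINT,SIZE,TYPE,UUID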

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Saturday 02 November 2024  18:54:04 -0400 (0:00:00.491)       0:00:35.261 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.003123",
    "end": "2024-11-02 18:54:04.748792",
    "rc": 0,
    "start": "2024-11-02 18:54:04.745669"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Wed Oct 30 09:04:39 2024
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=d6f52a16-d77d-4c0d-9841-76b856654401 /                       xfs     defaults        0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
UUID=b062ee93-ef86-4262-a663-741cc15318f4 /opt/test1 xfs defaults 0 0
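
The "# system_role:storage" line at the top is the fingerprint written by the "Add fingerprint to /etc/fstab if present" task earlier in this run, and the final UUID line is the entry the role created for /opt/test1. A quick manual check on the managed node (a sketch) would be:

    grep -F 'UUID=b062ee93-ef86-4262-a663-741cc15318f4' /etc/fstab
    findmnt /opt/test1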

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Saturday 02 November 2024  18:54:04 -0400 (0:00:00.455)       0:00:35.717 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.003056",
    "end": "2024-11-02 18:54:05.113013",
    "failed_when_result": false,
    "rc": 0,
    "start": "2024-11-02 18:54:05.109957"
}

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Saturday 02 November 2024  18:54:05 -0400 (0:00:00.389)       0:00:36.106 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node2 => (item={'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'stratis', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=b062ee93-ef86-4262-a663-741cc15318f4', '_kernel_device': '/dev/dm-5', '_raw_kernel_device': '/dev/dm-5'}]})

TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Saturday 02 November 2024  18:54:05 -0400 (0:00:00.160)       0:00:36.266 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Saturday 02 November 2024  18:54:05 -0400 (0:00:00.112)       0:00:36.379 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Saturday 02 November 2024  18:54:05 -0400 (0:00:00.062)       0:00:36.441 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify pool subset] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34
Saturday 02 November 2024  18:54:05 -0400 (0:00:00.058)       0:00:36.500 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node2 => (item=members)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node2 => (item=volumes)

TASK [Set test variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2
Saturday 02 November 2024  18:54:05 -0400 (0:00:00.141)       0:00:36.642 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Get the canonical device path for each member device] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8
Saturday 02 November 2024  18:54:05 -0400 (0:00:00.063)       0:00:36.705 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set pvs lvm length] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17
Saturday 02 November 2024  18:54:05 -0400 (0:00:00.061)       0:00:36.767 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set pool pvs] ************************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22
Saturday 02 November 2024  18:54:05 -0400 (0:00:00.059)       0:00:36.826 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify PV count] *********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27
Saturday 02 November 2024  18:54:05 -0400 (0:00:00.060)       0:00:36.887 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36
Saturday 02 November 2024  18:54:06 -0400 (0:00:00.059)       0:00:36.947 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41
Saturday 02 November 2024  18:54:06 -0400 (0:00:00.057)       0:00:37.005 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and not storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46
Saturday 02 November 2024  18:54:06 -0400 (0:00:00.058)       0:00:37.063 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level",
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:51
Saturday 02 November 2024  18:54:06 -0400 (0:00:00.039)       0:00:37.103 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:64
Saturday 02 November 2024  18:54:06 -0400 (0:00:00.042)       0:00:37.146 ***** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0
}

STDOUT:


** (process:178752): WARNING **: 18:54:06.490: failed to load module nvme: libbd_nvme.so.2: cannot open shared object file: No such file or directory
True



STDERR:

OpenSSH_8.7p1, OpenSSL 3.2.2 4 Jun 2024
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.42.118 originally 10.31.42.118
debug2: match not found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: configuration requests final Match pass
debug2: resolve_canonicalize: hostname 10.31.42.118 is address
debug1: re-parsing configuration
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.42.118 originally 10.31.42.118
debug2: match found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: auto-mux: Trying existing master
debug2: fd 3 setting O_NONBLOCK
debug2: mux_client_hello_exchange: master version 4
debug1: mux_client_request_session: master session id: 2
debug2: Received exit status from master 0
Shared connection to 10.31.42.118 closed.


TASK [Verify that PVs fill the whole devices when they should] *****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:73
Saturday 02 November 2024  18:54:06 -0400 (0:00:00.427)       0:00:37.573 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:83
Saturday 02 November 2024  18:54:06 -0400 (0:00:00.180)       0:00:37.753 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node2

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Saturday 02 November 2024  18:54:06 -0400 (0:00:00.081)       0:00:37.834 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Saturday 02 November 2024  18:54:06 -0400 (0:00:00.038)       0:00:37.873 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Saturday 02 November 2024  18:54:07 -0400 (0:00:00.035)       0:00:37.909 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Saturday 02 November 2024  18:54:07 -0400 (0:00:00.042)       0:00:37.952 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Saturday 02 November 2024  18:54:07 -0400 (0:00:00.054)       0:00:38.006 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Saturday 02 November 2024  18:54:07 -0400 (0:00:00.056)       0:00:38.063 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Saturday 02 November 2024  18:54:07 -0400 (0:00:00.040)       0:00:38.104 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55
Saturday 02 November 2024  18:54:07 -0400 (0:00:00.046)       0:00:38.151 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64
Saturday 02 November 2024  18:54:07 -0400 (0:00:00.043)       0:00:38.195 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74
Saturday 02 November 2024  18:54:07 -0400 (0:00:00.039)       0:00:38.234 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83
Saturday 02 November 2024  18:54:07 -0400 (0:00:00.034)       0:00:38.268 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}

TASK [Check LVM RAID] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:86
Saturday 02 November 2024  18:54:07 -0400 (0:00:00.039)       0:00:38.308 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node2

TASK [Validate pool member LVM RAID settings] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Saturday 02 November 2024  18:54:07 -0400 (0:00:00.108)       0:00:38.416 ***** 
skipping: [managed-node2] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=b062ee93-ef86-4262-a663-741cc15318f4', '_kernel_device': '/dev/dm-5', '_raw_kernel_device': '/dev/dm-5'})  => {
    "ansible_loop_var": "storage_test_lvmraid_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_lvmraid_volume": {
        "_device": "/dev/stratis/foo/test1",
        "_kernel_device": "/dev/dm-5",
        "_mount_id": "UUID=b062ee93-ef86-4262-a663-741cc15318f4",
        "_raw_device": "/dev/stratis/foo/test1",
        "_raw_kernel_device": "/dev/dm-5",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "present",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check Thin Pools] ********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:89
Saturday 02 November 2024  18:54:07 -0400 (0:00:00.040)       0:00:38.457 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node2

TASK [Validate pool member thinpool settings] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2
Saturday 02 November 2024  18:54:07 -0400 (0:00:00.073)       0:00:38.531 ***** 
skipping: [managed-node2] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=b062ee93-ef86-4262-a663-741cc15318f4', '_kernel_device': '/dev/dm-5', '_raw_kernel_device': '/dev/dm-5'})  => {
    "ansible_loop_var": "storage_test_thin_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_thin_volume": {
        "_device": "/dev/stratis/foo/test1",
        "_kernel_device": "/dev/dm-5",
        "_mount_id": "UUID=b062ee93-ef86-4262-a663-741cc15318f4",
        "_raw_device": "/dev/stratis/foo/test1",
        "_raw_kernel_device": "/dev/dm-5",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "present",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check member encryption] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:92
Saturday 02 November 2024  18:54:07 -0400 (0:00:00.042)       0:00:38.574 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node2

TASK [Set test variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Saturday 02 November 2024  18:54:07 -0400 (0:00:00.076)       0:00:38.651 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Saturday 02 November 2024  18:54:07 -0400 (0:00:00.063)       0:00:38.715 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Saturday 02 November 2024  18:54:07 -0400 (0:00:00.047)       0:00:38.763 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Saturday 02 November 2024  18:54:07 -0400 (0:00:00.037)       0:00:38.800 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:95
Saturday 02 November 2024  18:54:07 -0400 (0:00:00.038)       0:00:38.839 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node2

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Saturday 02 November 2024  18:54:08 -0400 (0:00:00.077)       0:00:38.917 ***** 
skipping: [managed-node2] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=b062ee93-ef86-4262-a663-741cc15318f4', '_kernel_device': '/dev/dm-5', '_raw_kernel_device': '/dev/dm-5'})  => {
    "ansible_loop_var": "storage_test_vdo_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_vdo_volume": {
        "_device": "/dev/stratis/foo/test1",
        "_kernel_device": "/dev/dm-5",
        "_mount_id": "UUID=b062ee93-ef86-4262-a663-741cc15318f4",
        "_raw_device": "/dev/stratis/foo/test1",
        "_raw_kernel_device": "/dev/dm-5",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "present",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:98
Saturday 02 November 2024  18:54:08 -0400 (0:00:00.077)       0:00:38.994 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node2

TASK [Run 'stratis report'] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Saturday 02 November 2024  18:54:08 -0400 (0:00:00.082)       0:00:39.077 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "stratis",
        "report"
    ],
    "delta": "0:00:00.377704",
    "end": "2024-11-02 18:54:08.842574",
    "rc": 0,
    "start": "2024-11-02 18:54:08.464870"
}

STDOUT:

{
    "name_to_pool_uuid_map": {},
    "partially_constructed_pools": [],
    "path_to_ids_map": {},
    "pools": [
        {
            "available_actions": "fully_operational",
            "blockdevs": {
                "cachedevs": [],
                "datadevs": [
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": true,
                        "path": "/dev/sda",
                        "size": "20971520 sectors",
                        "uuid": "e413a7fc-117d-4574-a376-73891db18f22"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": true,
                        "path": "/dev/sdb",
                        "size": "20971520 sectors",
                        "uuid": "0e740f57-98bb-4cc4-a6de-baf7011f8b18"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": true,
                        "path": "/dev/sdc",
                        "size": "20971520 sectors",
                        "uuid": "40d11307-b089-4ffd-a2e4-2956ca95ef98"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": true,
                        "path": "/dev/sdd",
                        "size": "2147483648 sectors",
                        "uuid": "021b72bd-8ac1-4dd1-97a9-b699ed067e27"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": false,
                        "path": "/dev/sde",
                        "size": "2147483648 sectors",
                        "uuid": "47c2037b-8a0b-4d3b-bda2-e60716c8c9b6"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": false,
                        "path": "/dev/sdf",
                        "size": "20971520 sectors",
                        "uuid": "c7bfaf35-4aa1-4246-a94c-a315f4f163b7"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": false,
                        "path": "/dev/sdg",
                        "size": "2147483648 sectors",
                        "uuid": "79a07dc2-d906-4072-81b5-e3e42d9d46cf"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": false,
                        "path": "/dev/sdh",
                        "size": "20971520 sectors",
                        "uuid": "9c3208a9-9d54-4146-89bd-8b5057ae32f2"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": false,
                        "path": "/dev/sdi",
                        "size": "20971520 sectors",
                        "uuid": "2894f483-d240-4a62-859a-686b8ccad4ec"
                    }
                ]
            },
            "filesystems": [
                {
                    "name": "test1",
                    "origin": "Not set",
                    "size": "8388608 sectors",
                    "size_limit": "Not set",
                    "used": "72351744 bytes",
                    "uuid": "b062ee93-ef86-4262-a663-741cc15318f4"
                }
            ],
            "fs_limit": 100,
            "name": "foo",
            "uuid": "d223f04f-5462-48b6-9daf-0bcf2988f0df"
        }
    ],
    "stopped_pools": []
}
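
The report above shows a single pool named "foo" in the fully_operational state, backed by nine data devices of which four (/dev/sda through /dev/sdd) are in use, plus one filesystem "test1" of 8388608 sectors. With the 512-byte sectors reported in the blksizes fields, that is 8388608 * 512 = 4294967296 bytes, i.e. exactly the requested 4 GiB. A minimal ad-hoc sketch of reproducing this check outside the test suite (assuming stratis is installed on the managed node; this is not part of the test code):

- name: Run 'stratis report' and capture the JSON output (sketch)
  ansible.builtin.command: stratis report
  register: stratis_report
  changed_when: false

- name: Show pool names parsed from the report JSON
  ansible.builtin.debug:
    msg: "{{ (stratis_report.stdout | from_json).pools | map(attribute='name') | list }}"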

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:11
Saturday 02 November 2024  18:54:08 -0400 (0:00:00.768)       0:00:39.845 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_stratis_pool_info": {
            "name_to_pool_uuid_map": {},
            "partially_constructed_pools": [],
            "path_to_ids_map": {},
            "pools": [
                {
                    "available_actions": "fully_operational",
                    "blockdevs": {
                        "cachedevs": [],
                        "datadevs": [
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": true,
                                "path": "/dev/sda",
                                "size": "20971520 sectors",
                                "uuid": "e413a7fc-117d-4574-a376-73891db18f22"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": true,
                                "path": "/dev/sdb",
                                "size": "20971520 sectors",
                                "uuid": "0e740f57-98bb-4cc4-a6de-baf7011f8b18"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": true,
                                "path": "/dev/sdc",
                                "size": "20971520 sectors",
                                "uuid": "40d11307-b089-4ffd-a2e4-2956ca95ef98"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": true,
                                "path": "/dev/sdd",
                                "size": "2147483648 sectors",
                                "uuid": "021b72bd-8ac1-4dd1-97a9-b699ed067e27"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": false,
                                "path": "/dev/sde",
                                "size": "2147483648 sectors",
                                "uuid": "47c2037b-8a0b-4d3b-bda2-e60716c8c9b6"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": false,
                                "path": "/dev/sdf",
                                "size": "20971520 sectors",
                                "uuid": "c7bfaf35-4aa1-4246-a94c-a315f4f163b7"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": false,
                                "path": "/dev/sdg",
                                "size": "2147483648 sectors",
                                "uuid": "79a07dc2-d906-4072-81b5-e3e42d9d46cf"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": false,
                                "path": "/dev/sdh",
                                "size": "20971520 sectors",
                                "uuid": "9c3208a9-9d54-4146-89bd-8b5057ae32f2"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": false,
                                "path": "/dev/sdi",
                                "size": "20971520 sectors",
                                "uuid": "2894f483-d240-4a62-859a-686b8ccad4ec"
                            }
                        ]
                    },
                    "filesystems": [
                        {
                            "name": "test1",
                            "origin": "Not set",
                            "size": "8388608 sectors",
                            "size_limit": "Not set",
                            "used": "72351744 bytes",
                            "uuid": "b062ee93-ef86-4262-a663-741cc15318f4"
                        }
                    ],
                    "fs_limit": 100,
                    "name": "foo",
                    "uuid": "d223f04f-5462-48b6-9daf-0bcf2988f0df"
                }
            ],
            "stopped_pools": []
        }
    },
    "changed": false
}

TASK [Verify that the pool was created] ****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Saturday 02 November 2024  18:54:09 -0400 (0:00:00.101)       0:00:39.947 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed
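
The assertion that just passed consumes the _stratis_pool_info fact set in the preceding task. The actual checks live in verify-pool-stratis.yml; a plausible sketch of what they can look like, given the fact layout shown above (hypothetical, not the verbatim test code):

- name: Verify that the pool was created (sketch)
  ansible.builtin.assert:
    that:
      - _stratis_pool_info.pools | length == 1
      - _stratis_pool_info.pools[0].name == 'foo'
    msg: Expected exactly one Stratis pool named 'foo'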

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:25
Saturday 02 November 2024  18:54:09 -0400 (0:00:00.104)       0:00:40.051 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:34
Saturday 02 November 2024  18:54:09 -0400 (0:00:00.065)       0:00:40.117 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:44
Saturday 02 November 2024  18:54:09 -0400 (0:00:00.065)       0:00:40.183 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:101
Saturday 02 November 2024  18:54:09 -0400 (0:00:00.063)       0:00:40.246 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Saturday 02 November 2024  18:54:09 -0400 (0:00:00.061)       0:00:40.308 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=b062ee93-ef86-4262-a663-741cc15318f4', '_kernel_device': '/dev/dm-5', '_raw_kernel_device': '/dev/dm-5'})

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Saturday 02 November 2024  18:54:09 -0400 (0:00:00.111)       0:00:40.419 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_volume_present": true,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Saturday 02 November 2024  18:54:09 -0400 (0:00:00.098)       0:00:40.518 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node2 => (item=mount)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node2 => (item=fstab)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node2 => (item=fs)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node2 => (item=device)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node2 => (item=encryption)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node2 => (item=md)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node2 => (item=size)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node2 => (item=cache)
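
The volume verification fans out into eight subset files, one per concern (mount, fstab, fs, device, encryption, md, size, cache); for a Stratis volume most of the md-, cache- and LVM-specific checks below are skipped by their conditionals. A plausible reconstruction of the fan-out pattern (the loop variable name matches the unrendered task title above; not the verbatim test code):

- name: Run test verify for {{ storage_test_volume_subset }} (sketch)
  ansible.builtin.include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
  loop: "{{ _storage_volume_tests }}"
  loop_control:
    loop_var: storage_test_volume_subset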

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Saturday 02 November 2024  18:54:09 -0400 (0:00:00.323)       0:00:40.841 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/stratis/foo/test1"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Saturday 02 November 2024  18:54:10 -0400 (0:00:00.079)       0:00:40.920 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "/opt/test1",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Saturday 02 November 2024  18:54:10 -0400 (0:00:00.108)       0:00:41.029 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Saturday 02 November 2024  18:54:10 -0400 (0:00:00.064)       0:00:41.093 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed
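
The assertion checks that /dev/stratis/foo/test1 is mounted at /opt/test1. A hypothetical standalone equivalent using gathered mount facts (assumes facts are available on the host; not the test's implementation):

- name: Check that exactly one mount exists at /opt/test1 (sketch)
  ansible.builtin.assert:
    that:
      - ansible_facts.mounts | selectattr('mount', 'equalto', '/opt/test1') | list | length == 1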

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Saturday 02 November 2024  18:54:10 -0400 (0:00:00.074)       0:00:41.168 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Saturday 02 November 2024  18:54:10 -0400 (0:00:00.065)       0:00:41.234 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Saturday 02 November 2024  18:54:10 -0400 (0:00:00.064)       0:00:41.298 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode",
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Saturday 02 November 2024  18:54:10 -0400 (0:00:00.064)       0:00:41.362 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Saturday 02 November 2024  18:54:10 -0400 (0:00:00.062)       0:00:41.425 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Saturday 02 November 2024  18:54:10 -0400 (0:00:00.064)       0:00:41.490 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Saturday 02 November 2024  18:54:10 -0400 (0:00:00.062)       0:00:41.552 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Saturday 02 November 2024  18:54:10 -0400 (0:00:00.065)       0:00:41.618 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "1",
        "storage_test_fstab_expected_mount_options_matches": "1",
        "storage_test_fstab_expected_mount_point_matches": "1",
        "storage_test_fstab_id_matches": [
            "UUID=b062ee93-ef86-4262-a663-741cc15318f4 "
        ],
        "storage_test_fstab_mount_options_matches": [
            " /opt/test1 xfs defaults "
        ],
        "storage_test_fstab_mount_point_matches": [
            " /opt/test1 "
        ]
    },
    "changed": false
}
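
All three expected match counts are 1, and the id match is keyed by UUID= rather than by device path, so the fstab entry survives device renumbering across reboots. A hedged standalone equivalent of the mount-point count (a sketch, not the test's code):

- name: Count fstab entries for the test mount point (sketch)
  ansible.builtin.command: grep -c ' /opt/test1 ' /etc/fstab
  register: fstab_matches
  changed_when: false
  failed_when: fstab_matches.stdout | int != 1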

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Saturday 02 November 2024  18:54:10 -0400 (0:00:00.136)       0:00:41.754 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Saturday 02 November 2024  18:54:10 -0400 (0:00:00.104)       0:00:41.859 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Saturday 02 November 2024  18:54:11 -0400 (0:00:00.182)       0:00:42.042 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__storage_verify_mount_options | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Saturday 02 November 2024  18:54:11 -0400 (0:00:00.091)       0:00:42.133 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:51
Saturday 02 November 2024  18:54:11 -0400 (0:00:00.071)       0:00:42.205 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Saturday 02 November 2024  18:54:11 -0400 (0:00:00.067)       0:00:42.273 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type != \"stratis\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Saturday 02 November 2024  18:54:11 -0400 (0:00:00.064)       0:00:42.338 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type != \"stratis\"",
    "skip_reason": "Conditional result was False"
}

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Saturday 02 November 2024  18:54:11 -0400 (0:00:00.062)       0:00:42.400 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730588038.3049679,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "ctime": 1730588038.3049679,
        "dev": 5,
        "device_type": 64773,
        "executable": false,
        "exists": true,
        "gid": 6,
        "gr_name": "disk",
        "inode": 5701,
        "isblk": true,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": false,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/symlink",
        "mode": "0660",
        "mtime": 1730588038.3049679,
        "nlink": 1,
        "path": "/dev/stratis/foo/test1",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": null,
        "wgrp": true,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
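
Note the combination of "isblk": true and "mimetype": "inode/symlink": /dev/stratis/foo/test1 is a udev-managed symlink that resolves to the device-mapper node (/dev/dm-5, per the volume facts earlier), and the stat above evidently followed the link while the mimetype probe still reported the link itself. A sketch of inspecting the link target directly (hypothetical, not part of the test):

- name: Stat the Stratis device node without following the link (sketch)
  ansible.builtin.stat:
    path: /dev/stratis/foo/test1
    follow: false
  register: stratis_link

- name: Show the symlink target
  ansible.builtin.debug:
    msg: "{{ stratis_link.stat.lnk_target | default('not a symlink') }}"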

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Saturday 02 November 2024  18:54:11 -0400 (0:00:00.457)       0:00:42.858 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Saturday 02 November 2024  18:54:12 -0400 (0:00:00.125)       0:00:42.996 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Saturday 02 November 2024  18:54:12 -0400 (0:00:00.069)       0:00:43.065 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Saturday 02 November 2024  18:54:12 -0400 (0:00:00.084)       0:00:43.150 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "st_volume_type": "stratis"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Saturday 02 November 2024  18:54:12 -0400 (0:00:00.076)       0:00:43.226 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"raid\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Saturday 02 November 2024  18:54:12 -0400 (0:00:00.043)       0:00:43.270 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Saturday 02 November 2024  18:54:12 -0400 (0:00:00.042)       0:00:43.312 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Saturday 02 November 2024  18:54:12 -0400 (0:00:00.040)       0:00:43.354 ***** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: cryptsetup
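
"Nothing to do" means cryptsetup was already installed, so the task reports ok rather than changed. A plausible reconstruction of the task (the actual test task may differ):

- name: Ensure cryptsetup is present (sketch)
  ansible.builtin.package:
    name: cryptsetup
    state: present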

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Saturday 02 November 2024  18:54:13 -0400 (0:00:01.306)       0:00:44.660 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Saturday 02 November 2024  18:54:13 -0400 (0:00:00.086)       0:00:44.746 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Saturday 02 November 2024  18:54:13 -0400 (0:00:00.036)       0:00:44.782 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Saturday 02 November 2024  18:54:13 -0400 (0:00:00.074)       0:00:44.857 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Saturday 02 November 2024  18:54:14 -0400 (0:00:00.056)       0:00:44.914 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Saturday 02 November 2024  18:54:14 -0400 (0:00:00.082)       0:00:44.997 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Saturday 02 November 2024  18:54:14 -0400 (0:00:00.063)       0:00:45.060 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Saturday 02 November 2024  18:54:14 -0400 (0:00:00.064)       0:00:45.124 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Saturday 02 November 2024  18:54:14 -0400 (0:00:00.062)       0:00:45.186 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Saturday 02 November 2024  18:54:14 -0400 (0:00:00.151)       0:00:45.338 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Saturday 02 November 2024  18:54:14 -0400 (0:00:00.113)       0:00:45.451 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Saturday 02 November 2024  18:54:14 -0400 (0:00:00.098)       0:00:45.550 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Saturday 02 November 2024  18:54:14 -0400 (0:00:00.099)       0:00:45.649 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Saturday 02 November 2024  18:54:14 -0400 (0:00:00.081)       0:00:45.731 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Saturday 02 November 2024  18:54:14 -0400 (0:00:00.073)       0:00:45.804 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Saturday 02 November 2024  18:54:14 -0400 (0:00:00.062)       0:00:45.866 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Saturday 02 November 2024  18:54:15 -0400 (0:00:00.135)       0:00:46.001 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Saturday 02 November 2024  18:54:15 -0400 (0:00:00.060)       0:00:46.062 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Saturday 02 November 2024  18:54:15 -0400 (0:00:00.056)       0:00:46.119 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Saturday 02 November 2024  18:54:15 -0400 (0:00:00.067)       0:00:46.186 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Saturday 02 November 2024  18:54:15 -0400 (0:00:00.078)       0:00:46.265 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Saturday 02 November 2024  18:54:15 -0400 (0:00:00.061)       0:00:46.326 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Saturday 02 November 2024  18:54:15 -0400 (0:00:00.044)       0:00:46.371 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Saturday 02 November 2024  18:54:15 -0400 (0:00:00.045)       0:00:46.417 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Saturday 02 November 2024  18:54:15 -0400 (0:00:00.041)       0:00:46.459 ***** 
ok: [managed-node2] => {
    "bytes": 4294967296,
    "changed": false,
    "lvm": "4g",
    "parted": "4GiB",
    "size": "4 GiB"
}
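
The parsed size of 4294967296 bytes is 4 * 1024^3, which agrees with the requested "4g", the parted value of 4GiB, and the 8388608-sector filesystem size from the Stratis report (8388608 * 512 = 4294967296). The same conversion can be done with Ansible's core human_to_bytes filter; an ad-hoc sketch (not the test's code):

- name: Convert the requested size to bytes (sketch)
  ansible.builtin.debug:
    msg: "{{ '4g' | human_to_bytes }}"  # prints 4294967296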

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Saturday 02 November 2024  18:54:16 -0400 (0:00:00.512)       0:00:46.971 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Saturday 02 November 2024  18:54:16 -0400 (0:00:00.081)       0:00:47.053 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Saturday 02 November 2024  18:54:16 -0400 (0:00:00.089)       0:00:47.142 ***** 
ok: [managed-node2] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}
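
The "VARIABLE IS NOT DEFINED!" marker here is expected rather than an error: storage_test_expected_size is only computed on the LVM branch (see the skipped "Establish base value for expected size" task below), so for a Stratis volume the debug task prints Ansible's undefined-variable notice while still reporting ok, and the later "Assert expected size is actual size" task is skipped for the same reason. A defensive variant would guard the lookup (a sketch, not the test's code):

- name: Show expected size (guarded sketch)
  ansible.builtin.debug:
    msg: "{{ storage_test_expected_size | default('not set for this volume type') }}"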

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Saturday 02 November 2024  18:54:16 -0400 (0:00:00.045)       0:00:47.187 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Saturday 02 November 2024  18:54:16 -0400 (0:00:00.053)       0:00:47.241 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Saturday 02 November 2024  18:54:16 -0400 (0:00:00.051)       0:00:47.293 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Saturday 02 November 2024  18:54:16 -0400 (0:00:00.136)       0:00:47.430 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Saturday 02 November 2024  18:54:16 -0400 (0:00:00.074)       0:00:47.504 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Saturday 02 November 2024  18:54:16 -0400 (0:00:00.086)       0:00:47.591 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Saturday 02 November 2024  18:54:16 -0400 (0:00:00.042)       0:00:47.634 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Saturday 02 November 2024  18:54:16 -0400 (0:00:00.042)       0:00:47.677 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Saturday 02 November 2024  18:54:16 -0400 (0:00:00.066)       0:00:47.743 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Saturday 02 November 2024  18:54:16 -0400 (0:00:00.059)       0:00:47.802 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Saturday 02 November 2024  18:54:16 -0400 (0:00:00.061)       0:00:47.864 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Saturday 02 November 2024  18:54:17 -0400 (0:00:00.074)       0:00:47.938 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Saturday 02 November 2024  18:54:17 -0400 (0:00:00.065)       0:00:48.004 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Saturday 02 November 2024  18:54:17 -0400 (0:00:00.056)       0:00:48.061 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Saturday 02 November 2024  18:54:17 -0400 (0:00:00.064)       0:00:48.126 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Saturday 02 November 2024  18:54:17 -0400 (0:00:00.035)       0:00:48.161 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Saturday 02 November 2024  18:54:17 -0400 (0:00:00.034)       0:00:48.195 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Saturday 02 November 2024  18:54:17 -0400 (0:00:00.033)       0:00:48.229 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Saturday 02 November 2024  18:54:17 -0400 (0:00:00.069)       0:00:48.299 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Saturday 02 November 2024  18:54:17 -0400 (0:00:00.044)       0:00:48.344 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Saturday 02 November 2024  18:54:17 -0400 (0:00:00.056)       0:00:48.400 ***** 
ok: [managed-node2] => {
    "storage_test_actual_size": {
        "bytes": 4294967296,
        "changed": false,
        "failed": false,
        "lvm": "4g",
        "parted": "4GiB",
        "size": "4 GiB"
    }
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Saturday 02 November 2024  18:54:17 -0400 (0:00:00.066)       0:00:48.467 ***** 
ok: [managed-node2] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Saturday 02 November 2024  18:54:17 -0400 (0:00:00.086)       0:00:48.554 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Saturday 02 November 2024  18:54:17 -0400 (0:00:00.108)       0:00:48.662 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Saturday 02 November 2024  18:54:17 -0400 (0:00:00.068)       0:00:48.731 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Saturday 02 November 2024  18:54:17 -0400 (0:00:00.061)       0:00:48.792 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Saturday 02 November 2024  18:54:17 -0400 (0:00:00.039)       0:00:48.832 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Saturday 02 November 2024  18:54:17 -0400 (0:00:00.041)       0:00:48.874 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Saturday 02 November 2024  18:54:18 -0400 (0:00:00.042)       0:00:48.917 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Saturday 02 November 2024  18:54:18 -0400 (0:00:00.037)       0:00:48.954 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Saturday 02 November 2024  18:54:18 -0400 (0:00:00.034)       0:00:48.989 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Saturday 02 November 2024  18:54:18 -0400 (0:00:00.037)       0:00:49.026 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Saturday 02 November 2024  18:54:18 -0400 (0:00:00.067)       0:00:49.093 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Repeat the previous invocation to verify idempotence] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:76
Saturday 02 November 2024  18:54:18 -0400 (0:00:00.047)       0:00:49.141 ***** 
included: fedora.linux_system_roles.storage for managed-node2
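
This invocation feeds the role the same storage_pools specification as the first run; the point is to prove idempotence, i.e. that re-applying an already-satisfied spec reports no changes. Reconstructed as a sketch from the Show storage_pools output below:

    - name: Repeat the previous invocation to verify idempotence
      include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            type: stratis
            disks: [sda, sdb, sdc, sdd, sde, sdf, sdg, sdh, sdi]
            volumes:
              - name: test1
                size: 4g
                mount_point: /opt/test1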

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Saturday 02 November 2024  18:54:18 -0400 (0:00:00.080)       0:00:49.221 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Saturday 02 November 2024  18:54:18 -0400 (0:00:00.059)       0:00:49.281 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Saturday 02 November 2024  18:54:18 -0400 (0:00:00.059)       0:00:49.340 ***** 
skipping: [managed-node2] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node2] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}
ok: [managed-node2] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}
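
RedHat.yml and CentOS.yml are skipped because no such files exist in the role's vars/ directory, and CentOS_9.yml is loaded twice because on CentOS Stream 9 both the major version and the full version render to the string "9", so the candidate list contains the same filename twice. A sketch of the usual pattern behind this loop (the role's actual candidate list may differ slightly):

    - name: Set platform/version specific variables
      include_vars: "{{ __vars_file }}"
      loop:
        - RedHat.yml
        - "{{ ansible_distribution }}.yml"
        - "{{ ansible_distribution }}_{{ ansible_distribution_major_version }}.yml"
        - "{{ ansible_distribution }}_{{ ansible_distribution_version }}.yml"
      vars:
        __vars_file: "{{ role_path }}/vars/{{ item }}"
      when: __vars_file is file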

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Saturday 02 November 2024  18:54:18 -0400 (0:00:00.090)       0:00:49.431 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Saturday 02 November 2024  18:54:18 -0400 (0:00:00.063)       0:00:49.495 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Saturday 02 November 2024  18:54:18 -0400 (0:00:00.066)       0:00:49.561 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Saturday 02 November 2024  18:54:18 -0400 (0:00:00.067)       0:00:49.629 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Saturday 02 November 2024  18:54:18 -0400 (0:00:00.067)       0:00:49.696 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Saturday 02 November 2024  18:54:18 -0400 (0:00:00.140)       0:00:49.836 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Saturday 02 November 2024  18:54:19 -0400 (0:00:00.113)       0:00:49.950 ***** 
ok: [managed-node2] => {
    "storage_pools": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "name": "foo",
            "type": "stratis",
            "volumes": [
                {
                    "mount_point": "/opt/test1",
                    "name": "test1",
                    "size": "4g"
                }
            ]
        }
    ]
}

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Saturday 02 November 2024  18:54:19 -0400 (0:00:00.074)       0:00:50.025 ***** 
ok: [managed-node2] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}
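
The "VARIABLE IS NOT DEFINED!" marker is informational, not a failure: debug prints it when asked to show an undefined variable, and this test defines only storage_pools. If the noise were unwanted, a default would silence it; one idiom, as a sketch:

    - name: Show storage_volumes
      debug:
        var: storage_volumes | d([])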

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Saturday 02 November 2024  18:54:19 -0400 (0:00:00.096)       0:00:50.121 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Saturday 02 November 2024  18:54:19 -0400 (0:00:00.083)       0:00:50.205 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Saturday 02 November 2024  18:54:19 -0400 (0:00:00.105)       0:00:50.310 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Saturday 02 November 2024  18:54:19 -0400 (0:00:00.062)       0:00:50.372 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}
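
The blivet-availability, package, copr, and service-facts tasks above are all skipped because the test sets storage_skip_checks once the first invocation has installed everything. Judging from the false_condition strings, the variable contains at least:

    storage_skip_checks:
      - blivet_available
      - packages_installed
      - service_facts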

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Saturday 02 November 2024  18:54:19 -0400 (0:00:00.052)       0:00:50.425 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Saturday 02 November 2024  18:54:19 -0400 (0:00:00.117)       0:00:50.542 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Saturday 02 November 2024  18:54:19 -0400 (0:00:00.042)       0:00:50.585 ***** 
ok: [managed-node2] => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [
        "/dev/stratis/foo/test1",
        "/dev/xvda1"
    ],
    "mounts": [
        {
            "dump": 0,
            "fstype": "xfs",
            "group": null,
            "mode": null,
            "opts": "defaults",
            "owner": null,
            "passno": 0,
            "path": "/opt/test1",
            "src": "UUID=b062ee93-ef86-4262-a663-741cc15318f4",
            "state": "mounted"
        }
    ],
    "packages": [
        "xfsprogs",
        "stratis-cli",
        "stratisd"
    ],
    "pools": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "stratis",
            "volumes": [
                {
                    "_device": "/dev/stratis/foo/test1",
                    "_kernel_device": "/dev/dm-5",
                    "_mount_id": "UUID=b062ee93-ef86-4262-a663-741cc15318f4",
                    "_raw_device": "/dev/stratis/foo/test1",
                    "_raw_kernel_device": "/dev/dm-5",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda",
                        "sdb",
                        "sdc",
                        "sdd",
                        "sde",
                        "sdf",
                        "sdg",
                        "sdh",
                        "sdi"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "stratis",
                    "vdo_pool_size": null
                }
            ]
        }
    ],
    "volumes": []
}
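
This is the idempotence result the test is after: actions is an empty list and changed is false, meaning blivet found the pool and volume already in the requested state and merely re-emitted the resulting mounts, packages, and pool description. A minimal sketch of an assertion over the registered result (the role registers it as blivet_output, shown a few tasks below):

    - name: Assert that the re-run made no changes (sketch)
      assert:
        that:
          - blivet_output is not changed
          - blivet_output.actions | length == 0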

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Saturday 02 November 2024  18:54:23 -0400 (0:00:03.752)       0:00:54.337 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Saturday 02 November 2024  18:54:23 -0400 (0:00:00.134)       0:00:54.472 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730588041.1149976,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "bce1a66d3189295168fd9b4f1a377789d2fb6bad",
        "ctime": 1730588041.1119976,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 415236291,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1730588041.1119976,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1436,
        "uid": 0,
        "version": "3857707169",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
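
The role stats /etc/fstab before deciding whether to add its fingerprint; the fingerprint task that follows is skipped here because blivet_output is unchanged. A sketch of the pair, with a hypothetical register name and an illustrative lineinfile (the role's actual mechanism may differ; the fingerprint line itself appears in the fstab dump later in this log):

    - name: Check if /etc/fstab is present
      stat:
        path: /etc/fstab
      register: __storage_fstab_stat  # hypothetical register name

    - name: Add fingerprint to /etc/fstab if present (illustrative)
      lineinfile:
        path: /etc/fstab
        line: "# system_role:storage"
        insertbefore: BOF
      when:
        - __storage_fstab_stat.stat.exists
        - blivet_output is changed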

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Saturday 02 November 2024  18:54:24 -0400 (0:00:00.457)       0:00:54.929 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "blivet_output is changed",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Saturday 02 November 2024  18:54:24 -0400 (0:00:00.072)       0:00:55.001 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Saturday 02 November 2024  18:54:24 -0400 (0:00:00.059)       0:00:55.061 ***** 
ok: [managed-node2] => {
    "blivet_output": {
        "actions": [],
        "changed": false,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/stratis/foo/test1",
            "/dev/xvda1"
        ],
        "mounts": [
            {
                "dump": 0,
                "fstype": "xfs",
                "group": null,
                "mode": null,
                "opts": "defaults",
                "owner": null,
                "passno": 0,
                "path": "/opt/test1",
                "src": "UUID=b062ee93-ef86-4262-a663-741cc15318f4",
                "state": "mounted"
            }
        ],
        "packages": [
            "xfsprogs",
            "stratis-cli",
            "stratisd"
        ],
        "pools": [
            {
                "disks": [
                    "sda",
                    "sdb",
                    "sdc",
                    "sdd",
                    "sde",
                    "sdf",
                    "sdg",
                    "sdh",
                    "sdi"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "stratis",
                "volumes": [
                    {
                        "_device": "/dev/stratis/foo/test1",
                        "_kernel_device": "/dev/dm-5",
                        "_mount_id": "UUID=b062ee93-ef86-4262-a663-741cc15318f4",
                        "_raw_device": "/dev/stratis/foo/test1",
                        "_raw_kernel_device": "/dev/dm-5",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda",
                            "sdb",
                            "sdc",
                            "sdd",
                            "sde",
                            "sdf",
                            "sdg",
                            "sdh",
                            "sdi"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "stratis",
                        "vdo_pool_size": null
                    }
                ]
            }
        ],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Saturday 02 November 2024  18:54:24 -0400 (0:00:00.079)       0:00:55.141 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda",
                    "sdb",
                    "sdc",
                    "sdd",
                    "sde",
                    "sdf",
                    "sdg",
                    "sdh",
                    "sdi"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "stratis",
                "volumes": [
                    {
                        "_device": "/dev/stratis/foo/test1",
                        "_kernel_device": "/dev/dm-5",
                        "_mount_id": "UUID=b062ee93-ef86-4262-a663-741cc15318f4",
                        "_raw_device": "/dev/stratis/foo/test1",
                        "_raw_kernel_device": "/dev/dm-5",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda",
                            "sdb",
                            "sdc",
                            "sdd",
                            "sde",
                            "sdf",
                            "sdg",
                            "sdh",
                            "sdi"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "stratis",
                        "vdo_pool_size": null
                    }
                ]
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Saturday 02 November 2024  18:54:24 -0400 (0:00:00.217)       0:00:55.359 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Saturday 02 November 2024  18:54:24 -0400 (0:00:00.109)       0:00:55.468 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Saturday 02 November 2024  18:54:24 -0400 (0:00:00.190)       0:00:55.659 ***** 
ok: [managed-node2] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Saturday 02 November 2024  18:54:25 -0400 (0:00:00.835)       0:00:56.494 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
ok: [managed-node2] => (item={'src': 'UUID=b062ee93-ef86-4262-a663-741cc15318f4', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": false,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "xfs",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "UUID=b062ee93-ef86-4262-a663-741cc15318f4",
        "state": "mounted"
    },
    "name": "/opt/test1",
    "opts": "defaults",
    "passno": "0",
    "src": "UUID=b062ee93-ef86-4262-a663-741cc15318f4"
}
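
Each entry of blivet_output.mounts is applied with ansible.posix.mount (hence the module redirect messages above, and the mount_info loop variable in the result). Because the fstab line and the mount already exist, the task reports ok rather than changed. A sketch of the loop; the real task may pass additional parameters:

    - name: Set up new/current mounts (sketch)
      ansible.posix.mount:
        src: "{{ mount_info['src'] }}"
        path: "{{ mount_info['path'] }}"
        fstype: "{{ mount_info['fstype'] }}"
        opts: "{{ mount_info['opts'] }}"
        state: "{{ mount_info['state'] }}"
      loop: "{{ blivet_output.mounts }}"
      loop_control:
        loop_var: mount_info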

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Saturday 02 November 2024  18:54:26 -0400 (0:00:00.489)       0:00:56.983 ***** 
skipping: [managed-node2] => (item={'src': 'UUID=b062ee93-ef86-4262-a663-741cc15318f4', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None})  => {
    "ansible_loop_var": "mount_info",
    "changed": false,
    "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "UUID=b062ee93-ef86-4262-a663-741cc15318f4",
        "state": "mounted"
    },
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Saturday 02 November 2024  18:54:26 -0400 (0:00:00.115)       0:00:57.098 ***** 
ok: [managed-node2] => {
    "changed": false,
    "name": null,
    "status": {}
}
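
Both daemon-reload tasks in this block return name: null and an empty status, the signature of ansible.builtin.systemd invoked with daemon_reload alone; a sketch:

    - name: Tell systemd to refresh its view of /etc/fstab
      systemd:
        daemon_reload: true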

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Saturday 02 November 2024  18:54:27 -0400 (0:00:00.825)       0:00:57.923 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730587881.7952998,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1730587879.2202723,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 104857860,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1730587879.2212098,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "3381617552",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Saturday 02 November 2024  18:54:27 -0400 (0:00:00.418)       0:00:58.342 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Saturday 02 November 2024  18:54:27 -0400 (0:00:00.060)       0:00:58.403 ***** 
ok: [managed-node2]

TASK [Verify role results] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:89
Saturday 02 November 2024  18:54:28 -0400 (0:00:01.035)       0:00:59.438 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node2

TASK [Print out pool information] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Saturday 02 November 2024  18:54:28 -0400 (0:00:00.127)       0:00:59.566 ***** 
ok: [managed-node2] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "stratis",
            "volumes": [
                {
                    "_device": "/dev/stratis/foo/test1",
                    "_kernel_device": "/dev/dm-5",
                    "_mount_id": "UUID=b062ee93-ef86-4262-a663-741cc15318f4",
                    "_raw_device": "/dev/stratis/foo/test1",
                    "_raw_kernel_device": "/dev/dm-5",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda",
                        "sdb",
                        "sdc",
                        "sdd",
                        "sde",
                        "sdf",
                        "sdg",
                        "sdh",
                        "sdi"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "stratis",
                    "vdo_pool_size": null
                }
            ]
        }
    ]
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Saturday 02 November 2024  18:54:28 -0400 (0:00:00.116)       0:00:59.683 ***** 
skipping: [managed-node2] => {
    "false_condition": "_storage_volumes_list | length > 0"
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Saturday 02 November 2024  18:54:28 -0400 (0:00:00.163)       0:00:59.846 ***** 
ok: [managed-node2] => {
    "changed": false,
    "info": {
        "/dev/mapper/stratis-1-private-d223f04f546248b69daf0bcf2988f0df-flex-mdv": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-d223f04f546248b69daf0bcf2988f0df-flex-mdv",
            "size": "512M",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-d223f04f546248b69daf0bcf2988f0df-flex-thindata": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-d223f04f546248b69daf0bcf2988f0df-flex-thindata",
            "size": "50G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-d223f04f546248b69daf0bcf2988f0df-flex-thinmeta": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-d223f04f546248b69daf0bcf2988f0df-flex-thinmeta",
            "size": "799M",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-d223f04f546248b69daf0bcf2988f0df-physical-originsub": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-d223f04f546248b69daf0bcf2988f0df-physical-originsub",
            "size": "52.1G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-d223f04f546248b69daf0bcf2988f0df-thinpool-pool": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-d223f04f546248b69daf0bcf2988f0df-thinpool-pool",
            "size": "50G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/sda": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": "e413a7fc-117d-4574-a376-73891db18f22"
        },
        "/dev/sdb": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": "0e740f57-98bb-4cc4-a6de-baf7011f8b18"
        },
        "/dev/sdc": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": "40d11307-b089-4ffd-a2e4-2956ca95ef98"
        },
        "/dev/sdd": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": "021b72bd-8ac1-4dd1-97a9-b699ed067e27"
        },
        "/dev/sde": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": "47c2037b-8a0b-4d3b-bda2-e60716c8c9b6"
        },
        "/dev/sdf": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": "c7bfaf35-4aa1-4246-a94c-a315f4f163b7"
        },
        "/dev/sdg": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": "79a07dc2-d906-4072-81b5-e3e42d9d46cf"
        },
        "/dev/sdh": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": "9c3208a9-9d54-4146-89bd-8b5057ae32f2"
        },
        "/dev/sdi": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": "2894f483-d240-4a62-859a-686b8ccad4ec"
        },
        "/dev/stratis/foo/test1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/opt/test1",
            "name": "/dev/stratis/foo/test1",
            "size": "4G",
            "type": "stratis",
            "uuid": "b062ee93-ef86-4262-a663-741cc15318f4"
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda1",
            "size": "250G",
            "type": "partition",
            "uuid": "d6f52a16-d77d-4c0d-9841-76b856654401"
        }
    }
}
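
The info map lays out the anatomy of a Stratis pool: the private device-mapper layers (flex-mdv for pool metadata, flex-thindata and flex-thinmeta backing the thin pool, physical-originsub, and thinpool-pool) sit beneath the single public device /dev/stratis/foo/test1, while all nine member disks report fstype stratis. The test gathers this via a bundled helper module; an equivalent ad-hoc probe, as a sketch with a hypothetical register name:

    - name: Collect info about the volumes (lsblk equivalent)
      command: lsblk -p -P -o NAME,FSTYPE,LABEL,UUID,SIZE,TYPE,MOUNTPOINT
      register: __blkinfo_raw  # hypothetical name
      changed_when: false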

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Saturday 02 November 2024  18:54:29 -0400 (0:00:00.458)       0:01:00.305 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.002999",
    "end": "2024-11-02 18:54:29.826811",
    "rc": 0,
    "start": "2024-11-02 18:54:29.823812"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Wed Oct 30 09:04:39 2024
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=d6f52a16-d77d-4c0d-9841-76b856654401 /                       xfs     defaults        0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
UUID=b062ee93-ef86-4262-a663-741cc15318f4 /opt/test1 xfs defaults 0 0
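
The dump shows both the role fingerprint (# system_role:storage) and exactly one entry mapping the filesystem UUID to /opt/test1. A sketch of the kind of check the verification tasks run against this output (storage_test_fstab is the register name the tests null out during cleanup):

    - name: Verify that the volume appears in /etc/fstab exactly once (sketch)
      assert:
        that: >-
          storage_test_fstab.stdout_lines
          | select('search', ' /opt/test1 ')
          | list | length == 1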

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Saturday 02 November 2024  18:54:29 -0400 (0:00:00.531)       0:01:00.837 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.003010",
    "end": "2024-11-02 18:54:30.267763",
    "failed_when_result": false,
    "rc": 0,
    "start": "2024-11-02 18:54:30.264753"
}

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Saturday 02 November 2024  18:54:30 -0400 (0:00:00.444)       0:01:01.281 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node2 => (item={'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'stratis', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=b062ee93-ef86-4262-a663-741cc15318f4', '_kernel_device': '/dev/dm-5', '_raw_kernel_device': '/dev/dm-5'}]})

TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Saturday 02 November 2024  18:54:30 -0400 (0:00:00.193)       0:01:01.474 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Saturday 02 November 2024  18:54:30 -0400 (0:00:00.070)       0:01:01.544 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Saturday 02 November 2024  18:54:30 -0400 (0:00:00.063)       0:01:01.608 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify pool subset] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34
Saturday 02 November 2024  18:54:30 -0400 (0:00:00.063)       0:01:01.671 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node2 => (item=members)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node2 => (item=volumes)
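
Pool verification fans out over the subsets listed in _storage_pool_tests, pulling in test-verify-pool-members.yml and test-verify-pool-volumes.yml once each. A sketch of that dispatch; the loop variable name is an assumption:

    - name: Verify pool subset
      include_tasks: "test-verify-pool-{{ storage_test_pool_subset }}.yml"
      loop: "{{ _storage_pool_tests }}"
      loop_control:
        loop_var: storage_test_pool_subset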

TASK [Set test variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2
Saturday 02 November 2024  18:54:30 -0400 (0:00:00.162)       0:01:01.834 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Get the canonical device path for each member device] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8
Saturday 02 November 2024  18:54:31 -0400 (0:00:00.109)       0:01:01.944 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Set pvs lvm length] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17
Saturday 02 November 2024  18:54:31 -0400 (0:00:00.104)       0:01:02.048 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set pool pvs] ************************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22
Saturday 02 November 2024  18:54:31 -0400 (0:00:00.092)       0:01:02.141 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify PV count] *********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27
Saturday 02 November 2024  18:54:31 -0400 (0:00:00.110)       0:01:02.252 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36
Saturday 02 November 2024  18:54:31 -0400 (0:00:00.179)       0:01:02.431 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41
Saturday 02 November 2024  18:54:31 -0400 (0:00:00.081)       0:01:02.512 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and not storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46
Saturday 02 November 2024  18:54:31 -0400 (0:00:00.089)       0:01:02.602 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level",
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:51
Saturday 02 November 2024  18:54:31 -0400 (0:00:00.072)       0:01:02.675 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:64
Saturday 02 November 2024  18:54:31 -0400 (0:00:00.080)       0:01:02.756 ***** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0
}

STDOUT:


** (process:180531): WARNING **: 18:54:32.137: failed to load module nvme: libbd_nvme.so.2: cannot open shared object file: No such file or directory
True



STDERR:

OpenSSH_8.7p1, OpenSSL 3.2.2 4 Jun 2024
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.42.118 originally 10.31.42.118
debug2: match not found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: configuration requests final Match pass
debug2: resolve_canonicalize: hostname 10.31.42.118 is address
debug1: re-parsing configuration
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.42.118 originally 10.31.42.118
debug2: match found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: auto-mux: Trying existing master
debug2: fd 3 setting O_NONBLOCK
debug2: mux_client_hello_exchange: master version 4
debug1: mux_client_request_session: master session id: 2
debug2: Received exit status from master 0
Shared connection to 10.31.42.118 closed.


TASK [Verify that PVs fill their whole devices when they should] ***************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:73
Saturday 02 November 2024  18:54:32 -0400 (0:00:00.474)       0:01:03.230 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:83
Saturday 02 November 2024  18:54:32 -0400 (0:00:00.091)       0:01:03.322 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node2

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Saturday 02 November 2024  18:54:32 -0400 (0:00:00.134)       0:01:03.456 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Saturday 02 November 2024  18:54:32 -0400 (0:00:00.065)       0:01:03.522 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Saturday 02 November 2024  18:54:32 -0400 (0:00:00.064)       0:01:03.587 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Saturday 02 November 2024  18:54:32 -0400 (0:00:00.060)       0:01:03.647 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Saturday 02 November 2024  18:54:32 -0400 (0:00:00.041)       0:01:03.688 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Saturday 02 November 2024  18:54:32 -0400 (0:00:00.045)       0:01:03.734 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Saturday 02 November 2024  18:54:32 -0400 (0:00:00.117)       0:01:03.852 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55
Saturday 02 November 2024  18:54:33 -0400 (0:00:00.060)       0:01:03.912 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64
Saturday 02 November 2024  18:54:33 -0400 (0:00:00.057)       0:01:03.970 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74
Saturday 02 November 2024  18:54:33 -0400 (0:00:00.046)       0:01:04.016 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83
Saturday 02 November 2024  18:54:33 -0400 (0:00:00.046)       0:01:04.063 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}

TASK [Check LVM RAID] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:86
Saturday 02 November 2024  18:54:33 -0400 (0:00:00.042)       0:01:04.106 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node2

TASK [Validate pool member LVM RAID settings] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Saturday 02 November 2024  18:54:33 -0400 (0:00:00.073)       0:01:04.180 ***** 
skipping: [managed-node2] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=b062ee93-ef86-4262-a663-741cc15318f4', '_kernel_device': '/dev/dm-5', '_raw_kernel_device': '/dev/dm-5'})  => {
    "ansible_loop_var": "storage_test_lvmraid_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_lvmraid_volume": {
        "_device": "/dev/stratis/foo/test1",
        "_kernel_device": "/dev/dm-5",
        "_mount_id": "UUID=b062ee93-ef86-4262-a663-741cc15318f4",
        "_raw_device": "/dev/stratis/foo/test1",
        "_raw_kernel_device": "/dev/dm-5",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [
            "sda",
            "sdb",
            "sdc",
            "sdd",
            "sde",
            "sdf",
            "sdg",
            "sdh",
            "sdi"
        ],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "present",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check Thin Pools] ********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:89
Saturday 02 November 2024  18:54:33 -0400 (0:00:00.043)       0:01:04.224 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node2

TASK [Validate pool member thinpool settings] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2
Saturday 02 November 2024  18:54:33 -0400 (0:00:00.074)       0:01:04.298 ***** 
skipping: [managed-node2] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=b062ee93-ef86-4262-a663-741cc15318f4', '_kernel_device': '/dev/dm-5', '_raw_kernel_device': '/dev/dm-5'})  => {
    "ansible_loop_var": "storage_test_thin_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_thin_volume": {
        "_device": "/dev/stratis/foo/test1",
        "_kernel_device": "/dev/dm-5",
        "_mount_id": "UUID=b062ee93-ef86-4262-a663-741cc15318f4",
        "_raw_device": "/dev/stratis/foo/test1",
        "_raw_kernel_device": "/dev/dm-5",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [
            "sda",
            "sdb",
            "sdc",
            "sdd",
            "sde",
            "sdf",
            "sdg",
            "sdh",
            "sdi"
        ],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "present",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check member encryption] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:92
Saturday 02 November 2024  18:54:33 -0400 (0:00:00.043)       0:01:04.342 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node2

TASK [Set test variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Saturday 02 November 2024  18:54:33 -0400 (0:00:00.077)       0:01:04.420 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Saturday 02 November 2024  18:54:33 -0400 (0:00:00.061)       0:01:04.481 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Saturday 02 November 2024  18:54:33 -0400 (0:00:00.077)       0:01:04.559 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Saturday 02 November 2024  18:54:33 -0400 (0:00:00.035)       0:01:04.595 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:95
Saturday 02 November 2024  18:54:33 -0400 (0:00:00.039)       0:01:04.635 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node2

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Saturday 02 November 2024  18:54:33 -0400 (0:00:00.077)       0:01:04.713 ***** 
skipping: [managed-node2] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=b062ee93-ef86-4262-a663-741cc15318f4', '_kernel_device': '/dev/dm-5', '_raw_kernel_device': '/dev/dm-5'})  => {
    "ansible_loop_var": "storage_test_vdo_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_vdo_volume": {
        "_device": "/dev/stratis/foo/test1",
        "_kernel_device": "/dev/dm-5",
        "_mount_id": "UUID=b062ee93-ef86-4262-a663-741cc15318f4",
        "_raw_device": "/dev/stratis/foo/test1",
        "_raw_kernel_device": "/dev/dm-5",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [
            "sda",
            "sdb",
            "sdc",
            "sdd",
            "sde",
            "sdf",
            "sdg",
            "sdh",
            "sdi"
        ],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "present",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:98
Saturday 02 November 2024  18:54:33 -0400 (0:00:00.044)       0:01:04.757 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node2

TASK [Run 'stratis report'] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Saturday 02 November 2024  18:54:33 -0400 (0:00:00.083)       0:01:04.840 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "stratis",
        "report"
    ],
    "delta": "0:00:00.337590",
    "end": "2024-11-02 18:54:34.604331",
    "rc": 0,
    "start": "2024-11-02 18:54:34.266741"
}

STDOUT:

{
    "name_to_pool_uuid_map": {},
    "partially_constructed_pools": [],
    "path_to_ids_map": {},
    "pools": [
        {
            "available_actions": "fully_operational",
            "blockdevs": {
                "cachedevs": [],
                "datadevs": [
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": true,
                        "path": "/dev/sda",
                        "size": "20971520 sectors",
                        "uuid": "e413a7fc-117d-4574-a376-73891db18f22"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": true,
                        "path": "/dev/sdb",
                        "size": "20971520 sectors",
                        "uuid": "0e740f57-98bb-4cc4-a6de-baf7011f8b18"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": true,
                        "path": "/dev/sdc",
                        "size": "20971520 sectors",
                        "uuid": "40d11307-b089-4ffd-a2e4-2956ca95ef98"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": true,
                        "path": "/dev/sdd",
                        "size": "2147483648 sectors",
                        "uuid": "021b72bd-8ac1-4dd1-97a9-b699ed067e27"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": false,
                        "path": "/dev/sde",
                        "size": "2147483648 sectors",
                        "uuid": "47c2037b-8a0b-4d3b-bda2-e60716c8c9b6"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": false,
                        "path": "/dev/sdf",
                        "size": "20971520 sectors",
                        "uuid": "c7bfaf35-4aa1-4246-a94c-a315f4f163b7"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": false,
                        "path": "/dev/sdg",
                        "size": "2147483648 sectors",
                        "uuid": "79a07dc2-d906-4072-81b5-e3e42d9d46cf"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": false,
                        "path": "/dev/sdh",
                        "size": "20971520 sectors",
                        "uuid": "9c3208a9-9d54-4146-89bd-8b5057ae32f2"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": false,
                        "path": "/dev/sdi",
                        "size": "20971520 sectors",
                        "uuid": "2894f483-d240-4a62-859a-686b8ccad4ec"
                    }
                ]
            },
            "filesystems": [
                {
                    "name": "test1",
                    "origin": "Not set",
                    "size": "8388608 sectors",
                    "size_limit": "Not set",
                    "used": "72351744 bytes",
                    "uuid": "b062ee93-ef86-4262-a663-741cc15318f4"
                }
            ],
            "fs_limit": 100,
            "name": "foo",
            "uuid": "d223f04f-5462-48b6-9daf-0bcf2988f0df"
        }
    ],
    "stopped_pools": []
}

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:11
Saturday 02 November 2024  18:54:34 -0400 (0:00:00.758)       0:01:05.598 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_stratis_pool_info": {
            "name_to_pool_uuid_map": {},
            "partially_constructed_pools": [],
            "path_to_ids_map": {},
            "pools": [
                {
                    "available_actions": "fully_operational",
                    "blockdevs": {
                        "cachedevs": [],
                        "datadevs": [
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": true,
                                "path": "/dev/sda",
                                "size": "20971520 sectors",
                                "uuid": "e413a7fc-117d-4574-a376-73891db18f22"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": true,
                                "path": "/dev/sdb",
                                "size": "20971520 sectors",
                                "uuid": "0e740f57-98bb-4cc4-a6de-baf7011f8b18"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": true,
                                "path": "/dev/sdc",
                                "size": "20971520 sectors",
                                "uuid": "40d11307-b089-4ffd-a2e4-2956ca95ef98"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": true,
                                "path": "/dev/sdd",
                                "size": "2147483648 sectors",
                                "uuid": "021b72bd-8ac1-4dd1-97a9-b699ed067e27"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": false,
                                "path": "/dev/sde",
                                "size": "2147483648 sectors",
                                "uuid": "47c2037b-8a0b-4d3b-bda2-e60716c8c9b6"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": false,
                                "path": "/dev/sdf",
                                "size": "20971520 sectors",
                                "uuid": "c7bfaf35-4aa1-4246-a94c-a315f4f163b7"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": false,
                                "path": "/dev/sdg",
                                "size": "2147483648 sectors",
                                "uuid": "79a07dc2-d906-4072-81b5-e3e42d9d46cf"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": false,
                                "path": "/dev/sdh",
                                "size": "20971520 sectors",
                                "uuid": "9c3208a9-9d54-4146-89bd-8b5057ae32f2"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": false,
                                "path": "/dev/sdi",
                                "size": "20971520 sectors",
                                "uuid": "2894f483-d240-4a62-859a-686b8ccad4ec"
                            }
                        ]
                    },
                    "filesystems": [
                        {
                            "name": "test1",
                            "origin": "Not set",
                            "size": "8388608 sectors",
                            "size_limit": "Not set",
                            "used": "72351744 bytes",
                            "uuid": "b062ee93-ef86-4262-a663-741cc15318f4"
                        }
                    ],
                    "fs_limit": 100,
                    "name": "foo",
                    "uuid": "d223f04f-5462-48b6-9daf-0bcf2988f0df"
                }
            ],
            "stopped_pools": []
        }
    },
    "changed": false
}

TASK [Verify that the pool was created] ****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Saturday 02 November 2024  18:54:34 -0400 (0:00:00.103)       0:01:05.701 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:25
Saturday 02 November 2024  18:54:34 -0400 (0:00:00.103)       0:01:05.805 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:34
Saturday 02 November 2024  18:54:34 -0400 (0:00:00.065)       0:01:05.871 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:44
Saturday 02 November 2024  18:54:35 -0400 (0:00:00.062)       0:01:05.933 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:101
Saturday 02 November 2024  18:54:35 -0400 (0:00:00.064)       0:01:05.998 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Saturday 02 November 2024  18:54:35 -0400 (0:00:00.127)       0:01:06.126 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=b062ee93-ef86-4262-a663-741cc15318f4', '_kernel_device': '/dev/dm-5', '_raw_kernel_device': '/dev/dm-5'})

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Saturday 02 November 2024  18:54:35 -0400 (0:00:00.114)       0:01:06.240 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_volume_present": true,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Saturday 02 November 2024  18:54:35 -0400 (0:00:00.102)       0:01:06.342 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node2 => (item=mount)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node2 => (item=fstab)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node2 => (item=fs)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node2 => (item=device)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node2 => (item=encryption)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node2 => (item=md)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node2 => (item=size)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node2 => (item=cache)

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Saturday 02 November 2024  18:54:35 -0400 (0:00:00.223)       0:01:06.565 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/stratis/foo/test1"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Saturday 02 November 2024  18:54:35 -0400 (0:00:00.066)       0:01:06.632 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "/opt/test1",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Saturday 02 November 2024  18:54:35 -0400 (0:00:00.074)       0:01:06.706 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Saturday 02 November 2024  18:54:35 -0400 (0:00:00.036)       0:01:06.743 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Saturday 02 November 2024  18:54:35 -0400 (0:00:00.040)       0:01:06.784 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Saturday 02 November 2024  18:54:35 -0400 (0:00:00.035)       0:01:06.819 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Saturday 02 November 2024  18:54:35 -0400 (0:00:00.035)       0:01:06.855 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode",
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Saturday 02 November 2024  18:54:36 -0400 (0:00:00.056)       0:01:06.911 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Saturday 02 November 2024  18:54:36 -0400 (0:00:00.059)       0:01:06.971 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Saturday 02 November 2024  18:54:36 -0400 (0:00:00.154)       0:01:07.126 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Saturday 02 November 2024  18:54:36 -0400 (0:00:00.055)       0:01:07.181 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Saturday 02 November 2024  18:54:36 -0400 (0:00:00.056)       0:01:07.238 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "1",
        "storage_test_fstab_expected_mount_options_matches": "1",
        "storage_test_fstab_expected_mount_point_matches": "1",
        "storage_test_fstab_id_matches": [
            "UUID=b062ee93-ef86-4262-a663-741cc15318f4 "
        ],
        "storage_test_fstab_mount_options_matches": [
            " /opt/test1 xfs defaults "
        ],
        "storage_test_fstab_mount_point_matches": [
            " /opt/test1 "
        ]
    },
    "changed": false
}

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Saturday 02 November 2024  18:54:36 -0400 (0:00:00.089)       0:01:07.327 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Saturday 02 November 2024  18:54:36 -0400 (0:00:00.061)       0:01:07.389 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Saturday 02 November 2024  18:54:36 -0400 (0:00:00.070)       0:01:07.459 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__storage_verify_mount_options | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Saturday 02 November 2024  18:54:36 -0400 (0:00:00.101)       0:01:07.561 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:51
Saturday 02 November 2024  18:54:36 -0400 (0:00:00.111)       0:01:07.673 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Saturday 02 November 2024  18:54:36 -0400 (0:00:00.132)       0:01:07.805 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type != \"stratis\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Saturday 02 November 2024  18:54:36 -0400 (0:00:00.063)       0:01:07.869 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type != \"stratis\"",
    "skip_reason": "Conditional result was False"
}

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Saturday 02 November 2024  18:54:37 -0400 (0:00:00.055)       0:01:07.924 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730588038.3049679,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "ctime": 1730588038.3049679,
        "dev": 5,
        "device_type": 64773,
        "executable": false,
        "exists": true,
        "gid": 6,
        "gr_name": "disk",
        "inode": 5701,
        "isblk": true,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": false,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/symlink",
        "mode": "0660",
        "mtime": 1730588038.3049679,
        "nlink": 1,
        "path": "/dev/stratis/foo/test1",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": null,
        "wgrp": true,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Saturday 02 November 2024  18:54:37 -0400 (0:00:00.445)       0:01:08.369 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Saturday 02 November 2024  18:54:37 -0400 (0:00:00.078)       0:01:08.448 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Saturday 02 November 2024  18:54:37 -0400 (0:00:00.063)       0:01:08.512 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Saturday 02 November 2024  18:54:37 -0400 (0:00:00.071)       0:01:08.583 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "st_volume_type": "stratis"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Saturday 02 November 2024  18:54:37 -0400 (0:00:00.130)       0:01:08.714 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"raid\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Saturday 02 November 2024  18:54:37 -0400 (0:00:00.064)       0:01:08.779 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Saturday 02 November 2024  18:54:37 -0400 (0:00:00.071)       0:01:08.850 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Saturday 02 November 2024  18:54:38 -0400 (0:00:00.070)       0:01:08.921 ***** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: cryptsetup

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Saturday 02 November 2024  18:54:39 -0400 (0:00:01.429)       0:01:10.350 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Saturday 02 November 2024  18:54:39 -0400 (0:00:00.060)       0:01:10.411 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Saturday 02 November 2024  18:54:39 -0400 (0:00:00.059)       0:01:10.471 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Saturday 02 November 2024  18:54:39 -0400 (0:00:00.119)       0:01:10.590 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Saturday 02 November 2024  18:54:39 -0400 (0:00:00.062)       0:01:10.652 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Saturday 02 November 2024  18:54:39 -0400 (0:00:00.063)       0:01:10.715 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Saturday 02 November 2024  18:54:39 -0400 (0:00:00.082)       0:01:10.798 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Saturday 02 November 2024  18:54:39 -0400 (0:00:00.058)       0:01:10.857 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Saturday 02 November 2024  18:54:40 -0400 (0:00:00.057)       0:01:10.914 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Saturday 02 November 2024  18:54:40 -0400 (0:00:00.111)       0:01:11.026 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Saturday 02 November 2024  18:54:40 -0400 (0:00:00.236)       0:01:11.263 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Saturday 02 November 2024  18:54:40 -0400 (0:00:00.131)       0:01:11.395 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Saturday 02 November 2024  18:54:40 -0400 (0:00:00.112)       0:01:11.507 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Saturday 02 November 2024  18:54:40 -0400 (0:00:00.095)       0:01:11.602 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Saturday 02 November 2024  18:54:40 -0400 (0:00:00.068)       0:01:11.670 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Saturday 02 November 2024  18:54:40 -0400 (0:00:00.061)       0:01:11.732 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Saturday 02 November 2024  18:54:40 -0400 (0:00:00.062)       0:01:11.794 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Saturday 02 November 2024  18:54:40 -0400 (0:00:00.062)       0:01:11.856 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Saturday 02 November 2024  18:54:41 -0400 (0:00:00.063)       0:01:11.920 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Saturday 02 November 2024  18:54:41 -0400 (0:00:00.072)       0:01:11.993 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Saturday 02 November 2024  18:54:41 -0400 (0:00:00.066)       0:01:12.059 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Saturday 02 November 2024  18:54:41 -0400 (0:00:00.073)       0:01:12.133 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Saturday 02 November 2024  18:54:41 -0400 (0:00:00.075)       0:01:12.209 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Saturday 02 November 2024  18:54:41 -0400 (0:00:00.071)       0:01:12.280 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Saturday 02 November 2024  18:54:41 -0400 (0:00:00.072)       0:01:12.352 ***** 
ok: [managed-node2] => {
    "bytes": 4294967296,
    "changed": false,
    "lvm": "4g",
    "parted": "4GiB",
    "size": "4 GiB"
}
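
The parsed size works out as expected: 4 GiB = 4 * 1024^3 =
4294967296 bytes, which matches the "bytes" field above. The built-in
human_to_bytes filter performs the same conversion (a hypothetical
one-liner for illustration, not the module the test actually uses):

    - ansible.builtin.debug:
        msg: "{{ '4G' | human_to_bytes }}"   # -> 4294967296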

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Saturday 02 November 2024  18:54:41 -0400 (0:00:00.543)       0:01:12.895 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Saturday 02 November 2024  18:54:42 -0400 (0:00:00.098)       0:01:12.994 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Saturday 02 November 2024  18:54:42 -0400 (0:00:00.102)       0:01:13.097 ***** 
ok: [managed-node2] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}
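
This output is expected rather than an error: storage_test_expected_size
is only computed for LVM volumes, so it is never set on this Stratis
path, and ansible.builtin.debug prints a marker for an undefined
variable instead of failing:

    - ansible.builtin.debug:
        var: storage_test_expected_size   # undefined -> "VARIABLE IS NOT DEFINED!"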

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Saturday 02 November 2024  18:54:42 -0400 (0:00:00.059)       0:01:13.156 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Saturday 02 November 2024  18:54:42 -0400 (0:00:00.068)       0:01:13.225 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Saturday 02 November 2024  18:54:42 -0400 (0:00:00.065)       0:01:13.290 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Saturday 02 November 2024  18:54:42 -0400 (0:00:00.063)       0:01:13.354 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Saturday 02 November 2024  18:54:42 -0400 (0:00:00.059)       0:01:13.414 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Saturday 02 November 2024  18:54:42 -0400 (0:00:00.090)       0:01:13.504 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Saturday 02 November 2024  18:54:42 -0400 (0:00:00.058)       0:01:13.562 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Saturday 02 November 2024  18:54:42 -0400 (0:00:00.109)       0:01:13.672 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Saturday 02 November 2024  18:54:42 -0400 (0:00:00.118)       0:01:13.791 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Saturday 02 November 2024  18:54:42 -0400 (0:00:00.092)       0:01:13.884 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Saturday 02 November 2024  18:54:43 -0400 (0:00:00.085)       0:01:13.969 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Saturday 02 November 2024  18:54:43 -0400 (0:00:00.069)       0:01:14.039 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Saturday 02 November 2024  18:54:43 -0400 (0:00:00.150)       0:01:14.190 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Saturday 02 November 2024  18:54:43 -0400 (0:00:00.068)       0:01:14.258 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Saturday 02 November 2024  18:54:43 -0400 (0:00:00.073)       0:01:14.332 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Saturday 02 November 2024  18:54:43 -0400 (0:00:00.076)       0:01:14.408 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Saturday 02 November 2024  18:54:43 -0400 (0:00:00.062)       0:01:14.471 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Saturday 02 November 2024  18:54:43 -0400 (0:00:00.058)       0:01:14.529 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Saturday 02 November 2024  18:54:43 -0400 (0:00:00.051)       0:01:14.581 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Saturday 02 November 2024  18:54:43 -0400 (0:00:00.058)       0:01:14.640 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Saturday 02 November 2024  18:54:43 -0400 (0:00:00.057)       0:01:14.697 ***** 
ok: [managed-node2] => {
    "storage_test_actual_size": {
        "bytes": 4294967296,
        "changed": false,
        "failed": false,
        "lvm": "4g",
        "parted": "4GiB",
        "size": "4 GiB"
    }
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Saturday 02 November 2024  18:54:43 -0400 (0:00:00.051)       0:01:14.749 ***** 
ok: [managed-node2] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Saturday 02 November 2024  18:54:43 -0400 (0:00:00.048)       0:01:14.797 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Saturday 02 November 2024  18:54:43 -0400 (0:00:00.064)       0:01:14.862 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Saturday 02 November 2024  18:54:43 -0400 (0:00:00.037)       0:01:14.899 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Saturday 02 November 2024  18:54:44 -0400 (0:00:00.037)       0:01:14.937 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Saturday 02 November 2024  18:54:44 -0400 (0:00:00.035)       0:01:14.972 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Saturday 02 November 2024  18:54:44 -0400 (0:00:00.104)       0:01:15.077 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Saturday 02 November 2024  18:54:44 -0400 (0:00:00.044)       0:01:15.121 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Saturday 02 November 2024  18:54:44 -0400 (0:00:00.049)       0:01:15.171 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Saturday 02 November 2024  18:54:44 -0400 (0:00:00.045)       0:01:15.216 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Saturday 02 November 2024  18:54:44 -0400 (0:00:00.042)       0:01:15.259 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Saturday 02 November 2024  18:54:44 -0400 (0:00:00.034)       0:01:15.294 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Add second filesystem to the pool] ***************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:92
Saturday 02 November 2024  18:54:44 -0400 (0:00:00.041)       0:01:15.336 ***** 
included: fedora.linux_system_roles.storage for managed-node2

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Saturday 02 November 2024  18:54:44 -0400 (0:00:00.096)       0:01:15.432 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Saturday 02 November 2024  18:54:44 -0400 (0:00:00.110)       0:01:15.542 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Saturday 02 November 2024  18:54:44 -0400 (0:00:00.111)       0:01:15.654 ***** 
skipping: [managed-node2] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node2] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}
ok: [managed-node2] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}
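
CentOS_9.yml is processed twice by design, not duplicated output: the
role's set_vars.yml loops over one vars-file candidate per fact, and on
CentOS Stream 9 the major-version and full-version candidates resolve
to the same file name. A sketch of the typical candidate list (the
exact variable names in set_vars.yml may differ):

    loop:
      - "{{ ansible_facts['os_family'] }}.yml"        # RedHat.yml (skipped, not a file)
      - "{{ ansible_facts['distribution'] }}.yml"     # CentOS.yml (skipped, not a file)
      - "{{ ansible_facts['distribution'] }}_{{ ansible_facts['distribution_major_version'] }}.yml"  # CentOS_9.yml
      - "{{ ansible_facts['distribution'] }}_{{ ansible_facts['distribution_version'] }}.yml"        # CentOS_9.yml again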

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Saturday 02 November 2024  18:54:44 -0400 (0:00:00.137)       0:01:15.791 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Saturday 02 November 2024  18:54:44 -0400 (0:00:00.082)       0:01:15.873 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Saturday 02 November 2024  18:54:45 -0400 (0:00:00.136)       0:01:16.009 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Saturday 02 November 2024  18:54:45 -0400 (0:00:00.069)       0:01:16.079 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Saturday 02 November 2024  18:54:45 -0400 (0:00:00.107)       0:01:16.187 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Saturday 02 November 2024  18:54:45 -0400 (0:00:00.226)       0:01:16.414 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Saturday 02 November 2024  18:54:45 -0400 (0:00:00.109)       0:01:16.523 ***** 
ok: [managed-node2] => {
    "storage_pools": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "name": "foo",
            "type": "stratis",
            "volumes": [
                {
                    "mount_point": "/opt/test1",
                    "name": "test1",
                    "size": "4g"
                },
                {
                    "mount_point": "/opt/test2",
                    "name": "test2",
                    "size": "4g"
                }
            ]
        }
    ]
}
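
This storage_pools value is what drives the run: pool "foo" is
re-declared with its existing volume test1 plus the new test2, and the
role reconciles only the difference. A minimal sketch of the kind of
invocation behind "Add second filesystem to the pool" (hypothetical
wrapper task; values copied from the log, not the verbatim test
playbook):

    - name: Add second filesystem to the pool
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            type: stratis
            disks: [sda, sdb, sdc, sdd, sde, sdf, sdg, sdh, sdi]
            volumes:
              - {name: test1, size: 4g, mount_point: /opt/test1}
              - {name: test2, size: 4g, mount_point: /opt/test2}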

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Saturday 02 November 2024  18:54:45 -0400 (0:00:00.101)       0:01:16.624 ***** 
ok: [managed-node2] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Saturday 02 November 2024  18:54:45 -0400 (0:00:00.115)       0:01:16.740 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Saturday 02 November 2024  18:54:45 -0400 (0:00:00.128)       0:01:16.869 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Saturday 02 November 2024  18:54:46 -0400 (0:00:00.097)       0:01:16.967 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Saturday 02 November 2024  18:54:46 -0400 (0:00:00.075)       0:01:17.043 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Saturday 02 November 2024  18:54:46 -0400 (0:00:00.070)       0:01:17.114 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Saturday 02 November 2024  18:54:46 -0400 (0:00:00.157)       0:01:17.271 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Saturday 02 November 2024  18:54:46 -0400 (0:00:00.062)       0:01:17.333 ***** 
changed: [managed-node2] => {
    "actions": [
        {
            "action": "create device",
            "device": "/dev/stratis/foo/test2",
            "fs_type": null
        },
        {
            "action": "create format",
            "device": "/dev/stratis/foo/test2",
            "fs_type": "stratis xfs"
        }
    ],
    "changed": true,
    "crypts": [],
    "leaves": [
        "/dev/stratis/foo/test1",
        "/dev/xvda1",
        "/dev/stratis/foo/test2"
    ],
    "mounts": [
        {
            "dump": 0,
            "fstype": "xfs",
            "group": null,
            "mode": null,
            "opts": "defaults",
            "owner": null,
            "passno": 0,
            "path": "/opt/test1",
            "src": "UUID=b062ee93-ef86-4262-a663-741cc15318f4",
            "state": "mounted"
        },
        {
            "dump": 0,
            "fstype": "xfs",
            "group": null,
            "mode": null,
            "opts": "defaults",
            "owner": null,
            "passno": 0,
            "path": "/opt/test2",
            "src": "UUID=ba08c36b-5f1b-48b9-bb6b-53d91a63e5b9",
            "state": "mounted"
        }
    ],
    "packages": [
        "xfsprogs",
        "stratisd",
        "stratis-cli"
    ],
    "pools": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "stratis",
            "volumes": [
                {
                    "_device": "/dev/stratis/foo/test1",
                    "_kernel_device": "/dev/dm-5",
                    "_mount_id": "UUID=b062ee93-ef86-4262-a663-741cc15318f4",
                    "_raw_device": "/dev/stratis/foo/test1",
                    "_raw_kernel_device": "/dev/dm-5",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda",
                        "sdb",
                        "sdc",
                        "sdd",
                        "sde",
                        "sdf",
                        "sdg",
                        "sdh",
                        "sdi"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "stratis",
                    "vdo_pool_size": null
                },
                {
                    "_device": "/dev/stratis/foo/test2",
                    "_kernel_device": "/dev/dm-6",
                    "_mount_id": "UUID=ba08c36b-5f1b-48b9-bb6b-53d91a63e5b9",
                    "_raw_device": "/dev/stratis/foo/test2",
                    "_raw_kernel_device": "/dev/dm-6",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test2",
                    "mount_user": null,
                    "name": "test2",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "stratis",
                    "vdo_pool_size": null
                }
            ]
        }
    ],
    "volumes": []
}
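
Only two actions were needed: test1 survives from the earlier run, so
blivet creates just the test2 device and its "stratis xfs" format. To
double-check the result by hand on the managed node (stratisd and
stratis-cli are already installed per the package list above), a
verification task might look like this (not part of the test; a manual
sketch):

    - name: Inspect the pool's filesystems (hypothetical check)
      ansible.builtin.command: stratis filesystem list foo
      register: stratis_fs
      changed_when: false   # read-only inspection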

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Saturday 02 November 2024  18:54:55 -0400 (0:00:09.488)       0:01:26.822 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Saturday 02 November 2024  18:54:56 -0400 (0:00:00.112)       0:01:26.934 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730588041.1149976,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "bce1a66d3189295168fd9b4f1a377789d2fb6bad",
        "ctime": 1730588041.1119976,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 415236291,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1730588041.1119976,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1436,
        "uid": 0,
        "version": "3857707169",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Saturday 02 November 2024  18:54:56 -0400 (0:00:00.567)       0:01:27.502 ***** 
ok: [managed-node2] => {
    "backup": "",
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Saturday 02 November 2024  18:54:57 -0400 (0:00:00.550)       0:01:28.053 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Saturday 02 November 2024  18:54:57 -0400 (0:00:00.089)       0:01:28.143 ***** 
ok: [managed-node2] => {
    "blivet_output": {
        "actions": [
            {
                "action": "create device",
                "device": "/dev/stratis/foo/test2",
                "fs_type": null
            },
            {
                "action": "create format",
                "device": "/dev/stratis/foo/test2",
                "fs_type": "stratis xfs"
            }
        ],
        "changed": true,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/stratis/foo/test1",
            "/dev/xvda1",
            "/dev/stratis/foo/test2"
        ],
        "mounts": [
            {
                "dump": 0,
                "fstype": "xfs",
                "group": null,
                "mode": null,
                "opts": "defaults",
                "owner": null,
                "passno": 0,
                "path": "/opt/test1",
                "src": "UUID=b062ee93-ef86-4262-a663-741cc15318f4",
                "state": "mounted"
            },
            {
                "dump": 0,
                "fstype": "xfs",
                "group": null,
                "mode": null,
                "opts": "defaults",
                "owner": null,
                "passno": 0,
                "path": "/opt/test2",
                "src": "UUID=ba08c36b-5f1b-48b9-bb6b-53d91a63e5b9",
                "state": "mounted"
            }
        ],
        "packages": [
            "xfsprogs",
            "stratisd",
            "stratis-cli"
        ],
        "pools": [
            {
                "disks": [
                    "sda",
                    "sdb",
                    "sdc",
                    "sdd",
                    "sde",
                    "sdf",
                    "sdg",
                    "sdh",
                    "sdi"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "stratis",
                "volumes": [
                    {
                        "_device": "/dev/stratis/foo/test1",
                        "_kernel_device": "/dev/dm-5",
                        "_mount_id": "UUID=b062ee93-ef86-4262-a663-741cc15318f4",
                        "_raw_device": "/dev/stratis/foo/test1",
                        "_raw_kernel_device": "/dev/dm-5",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda",
                            "sdb",
                            "sdc",
                            "sdd",
                            "sde",
                            "sdf",
                            "sdg",
                            "sdh",
                            "sdi"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "stratis",
                        "vdo_pool_size": null
                    },
                    {
                        "_device": "/dev/stratis/foo/test2",
                        "_kernel_device": "/dev/dm-6",
                        "_mount_id": "UUID=ba08c36b-5f1b-48b9-bb6b-53d91a63e5b9",
                        "_raw_device": "/dev/stratis/foo/test2",
                        "_raw_kernel_device": "/dev/dm-6",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test2",
                        "mount_user": null,
                        "name": "test2",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "stratis",
                        "vdo_pool_size": null
                    }
                ]
            }
        ],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Saturday 02 November 2024  18:54:57 -0400 (0:00:00.097)       0:01:28.241 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda",
                    "sdb",
                    "sdc",
                    "sdd",
                    "sde",
                    "sdf",
                    "sdg",
                    "sdh",
                    "sdi"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "stratis",
                "volumes": [
                    {
                        "_device": "/dev/stratis/foo/test1",
                        "_kernel_device": "/dev/dm-5",
                        "_mount_id": "UUID=b062ee93-ef86-4262-a663-741cc15318f4",
                        "_raw_device": "/dev/stratis/foo/test1",
                        "_raw_kernel_device": "/dev/dm-5",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda",
                            "sdb",
                            "sdc",
                            "sdd",
                            "sde",
                            "sdf",
                            "sdg",
                            "sdh",
                            "sdi"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "stratis",
                        "vdo_pool_size": null
                    },
                    {
                        "_device": "/dev/stratis/foo/test2",
                        "_kernel_device": "/dev/dm-6",
                        "_mount_id": "UUID=ba08c36b-5f1b-48b9-bb6b-53d91a63e5b9",
                        "_raw_device": "/dev/stratis/foo/test2",
                        "_raw_kernel_device": "/dev/dm-6",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test2",
                        "mount_user": null,
                        "name": "test2",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "stratis",
                        "vdo_pool_size": null
                    }
                ]
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Saturday 02 November 2024  18:54:57 -0400 (0:00:00.092)       0:01:28.333 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Saturday 02 November 2024  18:54:57 -0400 (0:00:00.048)       0:01:28.382 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Saturday 02 November 2024  18:54:57 -0400 (0:00:00.081)       0:01:28.463 ***** 
ok: [managed-node2] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Saturday 02 November 2024  18:54:58 -0400 (0:00:00.803)       0:01:29.267 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
ok: [managed-node2] => (item={'src': 'UUID=b062ee93-ef86-4262-a663-741cc15318f4', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": false,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "xfs",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "UUID=b062ee93-ef86-4262-a663-741cc15318f4",
        "state": "mounted"
    },
    "name": "/opt/test1",
    "opts": "defaults",
    "passno": "0",
    "src": "UUID=b062ee93-ef86-4262-a663-741cc15318f4"
}
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
changed: [managed-node2] => (item={'src': 'UUID=ba08c36b-5f1b-48b9-bb6b-53d91a63e5b9', 'path': '/opt/test2', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "xfs",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test2",
        "src": "UUID=ba08c36b-5f1b-48b9-bb6b-53d91a63e5b9",
        "state": "mounted"
    },
    "name": "/opt/test2",
    "opts": "defaults",
    "passno": "0",
    "src": "UUID=ba08c36b-5f1b-48b9-bb6b-53d91a63e5b9"
}
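
Note: each loop item above is handed to ansible.posix.mount (the "redirecting" lines show ansible.builtin.mount being rerouted to it). A roughly equivalent standalone task for the second filesystem, using the UUID from this run, would be:

    - name: Mount the second Stratis filesystem and persist it in /etc/fstab
      ansible.posix.mount:
        src: UUID=ba08c36b-5f1b-48b9-bb6b-53d91a63e5b9
        path: /opt/test2
        fstype: xfs
        opts: defaults
        state: mounted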

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Saturday 02 November 2024  18:54:59 -0400 (0:00:01.004)       0:01:30.272 ***** 
skipping: [managed-node2] => (item={'src': 'UUID=b062ee93-ef86-4262-a663-741cc15318f4', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None})  => {
    "ansible_loop_var": "mount_info",
    "changed": false,
    "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "UUID=b062ee93-ef86-4262-a663-741cc15318f4",
        "state": "mounted"
    },
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => (item={'src': 'UUID=ba08c36b-5f1b-48b9-bb6b-53d91a63e5b9', 'path': '/opt/test2', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None})  => {
    "ansible_loop_var": "mount_info",
    "changed": false,
    "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test2",
        "src": "UUID=ba08c36b-5f1b-48b9-bb6b-53d91a63e5b9",
        "state": "mounted"
    },
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped
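
Note: the ownership task is skipped because mount_user, mount_group, and mount_mode are all null for both volumes (see the false_condition above). To exercise it, a volume entry would set those keys, e.g. (values below are hypothetical, not from this run):

    - name: test1
      size: 4g
      fs_type: xfs
      mount_point: /opt/test1
      mount_user: testuser    # hypothetical; null in this run
      mount_group: testgroup  # hypothetical; null in this run
      mount_mode: '0750'      # hypothetical; null in this run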

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Saturday 02 November 2024  18:54:59 -0400 (0:00:00.142)       0:01:30.415 ***** 
ok: [managed-node2] => {
    "changed": false,
    "name": null,
    "status": {}
}
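
Note: the empty result ("name": null, "status": {}) is what the systemd module returns when it is invoked only to reload unit definitions, with no unit name; the task boils down to a sketch like:

    - name: Tell systemd to refresh its view of /etc/fstab
      ansible.builtin.systemd:
        daemon_reload: true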

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Saturday 02 November 2024  18:55:00 -0400 (0:00:00.818)       0:01:31.234 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730587881.7952998,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1730587879.2202723,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 104857860,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1730587879.2212098,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "3381617552",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Saturday 02 November 2024  18:55:00 -0400 (0:00:00.416)       0:01:31.650 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Saturday 02 November 2024  18:55:00 -0400 (0:00:00.045)       0:01:31.695 ***** 
ok: [managed-node2]

TASK [Verify role results] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:108
Saturday 02 November 2024  18:55:02 -0400 (0:00:01.390)       0:01:33.086 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node2

TASK [Print out pool information] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Saturday 02 November 2024  18:55:02 -0400 (0:00:00.279)       0:01:33.366 ***** 
ok: [managed-node2] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "stratis",
            "volumes": [
                {
                    "_device": "/dev/stratis/foo/test1",
                    "_kernel_device": "/dev/dm-5",
                    "_mount_id": "UUID=b062ee93-ef86-4262-a663-741cc15318f4",
                    "_raw_device": "/dev/stratis/foo/test1",
                    "_raw_kernel_device": "/dev/dm-5",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda",
                        "sdb",
                        "sdc",
                        "sdd",
                        "sde",
                        "sdf",
                        "sdg",
                        "sdh",
                        "sdi"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "stratis",
                    "vdo_pool_size": null
                },
                {
                    "_device": "/dev/stratis/foo/test2",
                    "_kernel_device": "/dev/dm-6",
                    "_mount_id": "UUID=ba08c36b-5f1b-48b9-bb6b-53d91a63e5b9",
                    "_raw_device": "/dev/stratis/foo/test2",
                    "_raw_kernel_device": "/dev/dm-6",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test2",
                    "mount_user": null,
                    "name": "test2",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "stratis",
                    "vdo_pool_size": null
                }
            ]
        }
    ]
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Saturday 02 November 2024  18:55:02 -0400 (0:00:00.162)       0:01:33.528 ***** 
skipping: [managed-node2] => {
    "false_condition": "_storage_volumes_list | length > 0"
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Saturday 02 November 2024  18:55:02 -0400 (0:00:00.145)       0:01:33.674 ***** 
ok: [managed-node2] => {
    "changed": false,
    "info": {
        "/dev/mapper/stratis-1-private-d223f04f546248b69daf0bcf2988f0df-flex-mdv": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-d223f04f546248b69daf0bcf2988f0df-flex-mdv",
            "size": "512M",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-d223f04f546248b69daf0bcf2988f0df-flex-thindata": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-d223f04f546248b69daf0bcf2988f0df-flex-thindata",
            "size": "50G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-d223f04f546248b69daf0bcf2988f0df-flex-thinmeta": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-d223f04f546248b69daf0bcf2988f0df-flex-thinmeta",
            "size": "799M",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-d223f04f546248b69daf0bcf2988f0df-physical-originsub": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-d223f04f546248b69daf0bcf2988f0df-physical-originsub",
            "size": "52.1G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-d223f04f546248b69daf0bcf2988f0df-thinpool-pool": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-d223f04f546248b69daf0bcf2988f0df-thinpool-pool",
            "size": "50G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/sda": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": "e413a7fc-117d-4574-a376-73891db18f22"
        },
        "/dev/sdb": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": "0e740f57-98bb-4cc4-a6de-baf7011f8b18"
        },
        "/dev/sdc": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": "40d11307-b089-4ffd-a2e4-2956ca95ef98"
        },
        "/dev/sdd": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": "021b72bd-8ac1-4dd1-97a9-b699ed067e27"
        },
        "/dev/sde": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": "47c2037b-8a0b-4d3b-bda2-e60716c8c9b6"
        },
        "/dev/sdf": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": "c7bfaf35-4aa1-4246-a94c-a315f4f163b7"
        },
        "/dev/sdg": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": "79a07dc2-d906-4072-81b5-e3e42d9d46cf"
        },
        "/dev/sdh": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": "9c3208a9-9d54-4146-89bd-8b5057ae32f2"
        },
        "/dev/sdi": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": "2894f483-d240-4a62-859a-686b8ccad4ec"
        },
        "/dev/stratis/foo/test1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/opt/test1",
            "name": "/dev/stratis/foo/test1",
            "size": "4G",
            "type": "stratis",
            "uuid": "b062ee93-ef86-4262-a663-741cc15318f4"
        },
        "/dev/stratis/foo/test2": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/opt/test2",
            "name": "/dev/stratis/foo/test2",
            "size": "4G",
            "type": "stratis",
            "uuid": "ba08c36b-5f1b-48b9-bb6b-53d91a63e5b9"
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda1",
            "size": "250G",
            "type": "partition",
            "uuid": "d6f52a16-d77d-4c0d-9841-76b856654401"
        }
    }
}
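
Note: the info mapping above (name, fstype, label, mountpoint, size, type, uuid per device) is gathered by the test suite's helper module; a comparable ad-hoc probe producing the same columns could look like this (register name is hypothetical):

    - name: Collect block device info comparable to the listing above
      ansible.builtin.command:
        cmd: lsblk -p --pairs -o NAME,FSTYPE,LABEL,MOUNTPOINT,SIZE,TYPE,UUID
      register: storage_test_lsblk  # hypothetical name
      changed_when: false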

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Saturday 02 November 2024  18:55:03 -0400 (0:00:00.457)       0:01:34.132 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.002953",
    "end": "2024-11-02 18:55:03.553729",
    "rc": 0,
    "start": "2024-11-02 18:55:03.550776"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Wed Oct 30 09:04:39 2024
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=d6f52a16-d77d-4c0d-9841-76b856654401 /                       xfs     defaults        0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
UUID=b062ee93-ef86-4262-a663-741cc15318f4 /opt/test1 xfs defaults 0 0
UUID=ba08c36b-5f1b-48b9-bb6b-53d91a63e5b9 /opt/test2 xfs defaults 0 0
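
Note: with the cat output registered, the follow-up verification can assert that exactly one fstab entry exists per volume; a sketch of such a check (register name is hypothetical, the paths are from this run):

    - name: Verify both volumes appear in /etc/fstab exactly once
      ansible.builtin.assert:
        that:
          - storage_test_fstab.stdout_lines | select('search', '/opt/test1 ') | list | length == 1
          - storage_test_fstab.stdout_lines | select('search', '/opt/test2 ') | list | length == 1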

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Saturday 02 November 2024  18:55:03 -0400 (0:00:00.449)       0:01:34.582 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.003425",
    "end": "2024-11-02 18:55:04.044519",
    "failed_when_result": false,
    "rc": 0,
    "start": "2024-11-02 18:55:04.041094"
}

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Saturday 02 November 2024  18:55:04 -0400 (0:00:00.457)       0:01:35.040 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node2 => (item={'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'stratis', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=b062ee93-ef86-4262-a663-741cc15318f4', '_kernel_device': '/dev/dm-5', '_raw_kernel_device': '/dev/dm-5'}, {'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test2', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test2', '_raw_device': '/dev/stratis/foo/test2', '_mount_id': 'UUID=ba08c36b-5f1b-48b9-bb6b-53d91a63e5b9', '_kernel_device': '/dev/dm-6', '_raw_kernel_device': '/dev/dm-6'}]})

TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Saturday 02 November 2024  18:55:04 -0400 (0:00:00.168)       0:01:35.208 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Saturday 02 November 2024  18:55:04 -0400 (0:00:00.068)       0:01:35.277 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Saturday 02 November 2024  18:55:04 -0400 (0:00:00.068)       0:01:35.345 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify pool subset] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34
Saturday 02 November 2024  18:55:04 -0400 (0:00:00.066)       0:01:35.412 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node2 => (item=members)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node2 => (item=volumes)

TASK [Set test variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2
Saturday 02 November 2024  18:55:04 -0400 (0:00:00.211)       0:01:35.623 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Get the canonical device path for each member device] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8
Saturday 02 November 2024  18:55:04 -0400 (0:00:00.079)       0:01:35.703 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Set pvs lvm length] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17
Saturday 02 November 2024  18:55:04 -0400 (0:00:00.071)       0:01:35.775 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set pool pvs] ************************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22
Saturday 02 November 2024  18:55:04 -0400 (0:00:00.068)       0:01:35.843 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify PV count] *********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27
Saturday 02 November 2024  18:55:05 -0400 (0:00:00.066)       0:01:35.910 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36
Saturday 02 November 2024  18:55:05 -0400 (0:00:00.066)       0:01:35.977 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41
Saturday 02 November 2024  18:55:05 -0400 (0:00:00.051)       0:01:36.029 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and not storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46
Saturday 02 November 2024  18:55:05 -0400 (0:00:00.043)       0:01:36.073 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level",
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:51
Saturday 02 November 2024  18:55:05 -0400 (0:00:00.047)       0:01:36.121 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:64
Saturday 02 November 2024  18:55:05 -0400 (0:00:00.042)       0:01:36.163 ***** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0
}

STDOUT:


** (process:182570): WARNING **: 18:55:05.497: failed to load module nvme: libbd_nvme.so.2: cannot open shared object file: No such file or directory
True


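Note: the task above runs a short Python probe on the managed node and prints True when the installed blivet can grow a PV to fill its device (the GLib warning about libbd_nvme.so.2 is unrelated to the result). A hedged sketch of such a probe — the class and attribute names are assumptions, not taken from this log:

    - name: Probe blivet for grow-to-fill support (class/attribute names assumed)
      ansible.builtin.command:
        cmd: >-
          python3 -c 'import blivet.formats.lvmpv as lvmpv;
          print(hasattr(lvmpv.LVMPhysicalVolume, "grow_to_fill"))'
      changed_when: false
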
TASK [Verify that PVs fill the whole devices when they should] *****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:73
Saturday 02 November 2024  18:55:05 -0400 (0:00:00.407)       0:01:36.571 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:83
Saturday 02 November 2024  18:55:05 -0400 (0:00:00.095)       0:01:36.666 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node2

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Saturday 02 November 2024  18:55:06 -0400 (0:00:00.273)       0:01:36.939 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Saturday 02 November 2024  18:55:06 -0400 (0:00:00.104)       0:01:37.044 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Saturday 02 November 2024  18:55:06 -0400 (0:00:00.099)       0:01:37.143 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Saturday 02 November 2024  18:55:06 -0400 (0:00:00.094)       0:01:37.237 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Saturday 02 November 2024  18:55:06 -0400 (0:00:00.077)       0:01:37.315 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Saturday 02 November 2024  18:55:06 -0400 (0:00:00.064)       0:01:37.379 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Saturday 02 November 2024  18:55:06 -0400 (0:00:00.066)       0:01:37.446 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55
Saturday 02 November 2024  18:55:06 -0400 (0:00:00.065)       0:01:37.511 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64
Saturday 02 November 2024  18:55:06 -0400 (0:00:00.109)       0:01:37.621 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74
Saturday 02 November 2024  18:55:06 -0400 (0:00:00.127)       0:01:37.749 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83
Saturday 02 November 2024  18:55:06 -0400 (0:00:00.126)       0:01:37.875 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}

TASK [Check LVM RAID] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:86
Saturday 02 November 2024  18:55:07 -0400 (0:00:00.130)       0:01:38.006 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node2

TASK [Validate pool member LVM RAID settings] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Saturday 02 November 2024  18:55:07 -0400 (0:00:00.170)       0:01:38.177 ***** 
skipping: [managed-node2] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=b062ee93-ef86-4262-a663-741cc15318f4', '_kernel_device': '/dev/dm-5', '_raw_kernel_device': '/dev/dm-5'})  => {
    "ansible_loop_var": "storage_test_lvmraid_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_lvmraid_volume": {
        "_device": "/dev/stratis/foo/test1",
        "_kernel_device": "/dev/dm-5",
        "_mount_id": "UUID=b062ee93-ef86-4262-a663-741cc15318f4",
        "_raw_device": "/dev/stratis/foo/test1",
        "_raw_kernel_device": "/dev/dm-5",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [
            "sda",
            "sdb",
            "sdc",
            "sdd",
            "sde",
            "sdf",
            "sdg",
            "sdh",
            "sdi"
        ],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "present",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node2] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test2', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test2', '_raw_device': '/dev/stratis/foo/test2', '_mount_id': 'UUID=ba08c36b-5f1b-48b9-bb6b-53d91a63e5b9', '_kernel_device': '/dev/dm-6', '_raw_kernel_device': '/dev/dm-6'})  => {
    "ansible_loop_var": "storage_test_lvmraid_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_lvmraid_volume": {
        "_device": "/dev/stratis/foo/test2",
        "_kernel_device": "/dev/dm-6",
        "_mount_id": "UUID=ba08c36b-5f1b-48b9-bb6b-53d91a63e5b9",
        "_raw_device": "/dev/stratis/foo/test2",
        "_raw_kernel_device": "/dev/dm-6",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test2",
        "mount_user": null,
        "name": "test2",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "present",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check Thin Pools] ********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:89
Saturday 02 November 2024  18:55:07 -0400 (0:00:00.099)       0:01:38.276 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node2

TASK [Validate pool member thinpool settings] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2
Saturday 02 November 2024  18:55:07 -0400 (0:00:00.183)       0:01:38.459 ***** 
skipping: [managed-node2] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=b062ee93-ef86-4262-a663-741cc15318f4', '_kernel_device': '/dev/dm-5', '_raw_kernel_device': '/dev/dm-5'})  => {
    "ansible_loop_var": "storage_test_thin_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_thin_volume": {
        "_device": "/dev/stratis/foo/test1",
        "_kernel_device": "/dev/dm-5",
        "_mount_id": "UUID=b062ee93-ef86-4262-a663-741cc15318f4",
        "_raw_device": "/dev/stratis/foo/test1",
        "_raw_kernel_device": "/dev/dm-5",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [
            "sda",
            "sdb",
            "sdc",
            "sdd",
            "sde",
            "sdf",
            "sdg",
            "sdh",
            "sdi"
        ],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "present",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node2] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test2', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test2', '_raw_device': '/dev/stratis/foo/test2', '_mount_id': 'UUID=ba08c36b-5f1b-48b9-bb6b-53d91a63e5b9', '_kernel_device': '/dev/dm-6', '_raw_kernel_device': '/dev/dm-6'})  => {
    "ansible_loop_var": "storage_test_thin_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_thin_volume": {
        "_device": "/dev/stratis/foo/test2",
        "_kernel_device": "/dev/dm-6",
        "_mount_id": "UUID=ba08c36b-5f1b-48b9-bb6b-53d91a63e5b9",
        "_raw_device": "/dev/stratis/foo/test2",
        "_raw_kernel_device": "/dev/dm-6",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test2",
        "mount_user": null,
        "name": "test2",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "present",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check member encryption] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:92
Saturday 02 November 2024  18:55:07 -0400 (0:00:00.068)       0:01:38.528 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node2

TASK [Set test variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Saturday 02 November 2024  18:55:07 -0400 (0:00:00.082)       0:01:38.611 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Saturday 02 November 2024  18:55:07 -0400 (0:00:00.063)       0:01:38.674 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Saturday 02 November 2024  18:55:07 -0400 (0:00:00.035)       0:01:38.710 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}
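
Note: both validation loops are skipped because the pool has no encrypted members and _storage_test_expected_crypttab_entries is "0". If entries were expected, the check would reduce to an assertion along these lines (register name is hypothetical; the expected-count variable is from this run):

    - name: Verify the crypttab entry count for the pool members
      ansible.builtin.assert:
        that:
          - storage_test_crypttab.stdout_lines | length == _storage_test_expected_crypttab_entries | int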

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Saturday 02 November 2024  18:55:07 -0400 (0:00:00.033)       0:01:38.744 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:95
Saturday 02 November 2024  18:55:07 -0400 (0:00:00.041)       0:01:38.785 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node2

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Saturday 02 November 2024  18:55:07 -0400 (0:00:00.079)       0:01:38.865 ***** 
skipping: [managed-node2] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=b062ee93-ef86-4262-a663-741cc15318f4', '_kernel_device': '/dev/dm-5', '_raw_kernel_device': '/dev/dm-5'})  => {
    "ansible_loop_var": "storage_test_vdo_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_vdo_volume": {
        "_device": "/dev/stratis/foo/test1",
        "_kernel_device": "/dev/dm-5",
        "_mount_id": "UUID=b062ee93-ef86-4262-a663-741cc15318f4",
        "_raw_device": "/dev/stratis/foo/test1",
        "_raw_kernel_device": "/dev/dm-5",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [
            "sda",
            "sdb",
            "sdc",
            "sdd",
            "sde",
            "sdf",
            "sdg",
            "sdh",
            "sdi"
        ],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "present",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node2] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test2', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test2', '_raw_device': '/dev/stratis/foo/test2', '_mount_id': 'UUID=ba08c36b-5f1b-48b9-bb6b-53d91a63e5b9', '_kernel_device': '/dev/dm-6', '_raw_kernel_device': '/dev/dm-6'})  => {
    "ansible_loop_var": "storage_test_vdo_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_vdo_volume": {
        "_device": "/dev/stratis/foo/test2",
        "_kernel_device": "/dev/dm-6",
        "_mount_id": "UUID=ba08c36b-5f1b-48b9-bb6b-53d91a63e5b9",
        "_raw_device": "/dev/stratis/foo/test2",
        "_raw_kernel_device": "/dev/dm-6",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test2",
        "mount_user": null,
        "name": "test2",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "present",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped
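
The per-item skips above come from looping the VDO validation over the pool's volumes while gating on the pool type. Below is a minimal sketch of such a task, reusing the loop variable and condition reported in the output; the include file name is an assumption:

    - name: Validate pool member VDO settings
      include_tasks: verify-pool-member-vdo.yml  # assumed file name
      loop: "{{ storage_test_pool.volumes }}"
      loop_control:
        loop_var: storage_test_vdo_volume
      when: storage_test_pool.type == 'lvm'  # the false_condition shown above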

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:98
Saturday 02 November 2024  18:55:08 -0400 (0:00:00.087)       0:01:38.952 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node2

TASK [Run 'stratis report'] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Saturday 02 November 2024  18:55:08 -0400 (0:00:00.123)       0:01:39.076 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "stratis",
        "report"
    ],
    "delta": "0:00:00.338898",
    "end": "2024-11-02 18:55:08.855206",
    "rc": 0,
    "start": "2024-11-02 18:55:08.516308"
}

STDOUT:

{
    "name_to_pool_uuid_map": {},
    "partially_constructed_pools": [],
    "path_to_ids_map": {},
    "pools": [
        {
            "available_actions": "fully_operational",
            "blockdevs": {
                "cachedevs": [],
                "datadevs": [
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": true,
                        "path": "/dev/sda",
                        "size": "20971520 sectors",
                        "uuid": "e413a7fc-117d-4574-a376-73891db18f22"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": true,
                        "path": "/dev/sdb",
                        "size": "20971520 sectors",
                        "uuid": "0e740f57-98bb-4cc4-a6de-baf7011f8b18"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": true,
                        "path": "/dev/sdc",
                        "size": "20971520 sectors",
                        "uuid": "40d11307-b089-4ffd-a2e4-2956ca95ef98"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": true,
                        "path": "/dev/sdd",
                        "size": "2147483648 sectors",
                        "uuid": "021b72bd-8ac1-4dd1-97a9-b699ed067e27"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": false,
                        "path": "/dev/sde",
                        "size": "2147483648 sectors",
                        "uuid": "47c2037b-8a0b-4d3b-bda2-e60716c8c9b6"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": false,
                        "path": "/dev/sdf",
                        "size": "20971520 sectors",
                        "uuid": "c7bfaf35-4aa1-4246-a94c-a315f4f163b7"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": false,
                        "path": "/dev/sdg",
                        "size": "2147483648 sectors",
                        "uuid": "79a07dc2-d906-4072-81b5-e3e42d9d46cf"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": false,
                        "path": "/dev/sdh",
                        "size": "20971520 sectors",
                        "uuid": "9c3208a9-9d54-4146-89bd-8b5057ae32f2"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": false,
                        "path": "/dev/sdi",
                        "size": "20971520 sectors",
                        "uuid": "2894f483-d240-4a62-859a-686b8ccad4ec"
                    }
                ]
            },
            "filesystems": [
                {
                    "name": "test2",
                    "origin": "Not set",
                    "size": "8388608 sectors",
                    "size_limit": "Not set",
                    "used": "72351744 bytes",
                    "uuid": "ba08c36b-5f1b-48b9-bb6b-53d91a63e5b9"
                },
                {
                    "name": "test1",
                    "origin": "Not set",
                    "size": "8388608 sectors",
                    "size_limit": "Not set",
                    "used": "72351744 bytes",
                    "uuid": "b062ee93-ef86-4262-a663-741cc15318f4"
                }
            ],
            "fs_limit": 100,
            "name": "foo",
            "uuid": "d223f04f-5462-48b6-9daf-0bcf2988f0df"
        }
    ],
    "stopped_pools": []
}
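
A task producing the report above can be as small as the sketch below; the register name matches the storage_test_stratis_report variable reset later in this run, while changed_when: false is an assumption to keep the read-only check from reporting changes:

    - name: Run 'stratis report'
      command: stratis report
      register: storage_test_stratis_report
      changed_when: false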

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:11
Saturday 02 November 2024  18:55:08 -0400 (0:00:00.798)       0:01:39.874 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_stratis_pool_info": {
            "name_to_pool_uuid_map": {},
            "partially_constructed_pools": [],
            "path_to_ids_map": {},
            "pools": [
                {
                    "available_actions": "fully_operational",
                    "blockdevs": {
                        "cachedevs": [],
                        "datadevs": [
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": true,
                                "path": "/dev/sda",
                                "size": "20971520 sectors",
                                "uuid": "e413a7fc-117d-4574-a376-73891db18f22"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": true,
                                "path": "/dev/sdb",
                                "size": "20971520 sectors",
                                "uuid": "0e740f57-98bb-4cc4-a6de-baf7011f8b18"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": true,
                                "path": "/dev/sdc",
                                "size": "20971520 sectors",
                                "uuid": "40d11307-b089-4ffd-a2e4-2956ca95ef98"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": true,
                                "path": "/dev/sdd",
                                "size": "2147483648 sectors",
                                "uuid": "021b72bd-8ac1-4dd1-97a9-b699ed067e27"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": false,
                                "path": "/dev/sde",
                                "size": "2147483648 sectors",
                                "uuid": "47c2037b-8a0b-4d3b-bda2-e60716c8c9b6"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": false,
                                "path": "/dev/sdf",
                                "size": "20971520 sectors",
                                "uuid": "c7bfaf35-4aa1-4246-a94c-a315f4f163b7"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": false,
                                "path": "/dev/sdg",
                                "size": "2147483648 sectors",
                                "uuid": "79a07dc2-d906-4072-81b5-e3e42d9d46cf"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": false,
                                "path": "/dev/sdh",
                                "size": "20971520 sectors",
                                "uuid": "9c3208a9-9d54-4146-89bd-8b5057ae32f2"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": false,
                                "path": "/dev/sdi",
                                "size": "20971520 sectors",
                                "uuid": "2894f483-d240-4a62-859a-686b8ccad4ec"
                            }
                        ]
                    },
                    "filesystems": [
                        {
                            "name": "test2",
                            "origin": "Not set",
                            "size": "8388608 sectors",
                            "size_limit": "Not set",
                            "used": "72351744 bytes",
                            "uuid": "ba08c36b-5f1b-48b9-bb6b-53d91a63e5b9"
                        },
                        {
                            "name": "test1",
                            "origin": "Not set",
                            "size": "8388608 sectors",
                            "size_limit": "Not set",
                            "used": "72351744 bytes",
                            "uuid": "b062ee93-ef86-4262-a663-741cc15318f4"
                        }
                    ],
                    "fs_limit": 100,
                    "name": "foo",
                    "uuid": "d223f04f-5462-48b6-9daf-0bcf2988f0df"
                }
            ],
            "stopped_pools": []
        }
    },
    "changed": false
}
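
Assuming the fact is derived from the registered command output, turning the report into a reusable structure is a single filter; a sketch, not necessarily the test's exact wording:

    - name: Get information about Stratis
      set_fact:
        _stratis_pool_info: "{{ storage_test_stratis_report.stdout | from_json }}"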

TASK [Verify that the pool was created] ****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Saturday 02 November 2024  18:55:09 -0400 (0:00:00.097)       0:01:39.972 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed
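
A plausible shape for this assertion is a membership check against the parsed report; the exact condition is an assumption:

    - name: Verify that the pool was created
      assert:
        that:
          - _stratis_pool_info.pools | selectattr('name', 'equalto', storage_test_pool.name)
            | list | length == 1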

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:25
Saturday 02 November 2024  18:55:09 -0400 (0:00:00.078)       0:01:40.051 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:34
Saturday 02 November 2024  18:55:09 -0400 (0:00:00.051)       0:01:40.102 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:44
Saturday 02 November 2024  18:55:09 -0400 (0:00:00.044)       0:01:40.147 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:101
Saturday 02 November 2024  18:55:09 -0400 (0:00:00.041)       0:01:40.189 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Saturday 02 November 2024  18:55:09 -0400 (0:00:00.039)       0:01:40.228 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=b062ee93-ef86-4262-a663-741cc15318f4', '_kernel_device': '/dev/dm-5', '_raw_kernel_device': '/dev/dm-5'})
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test2', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test2', '_raw_device': '/dev/stratis/foo/test2', '_mount_id': 'UUID=ba08c36b-5f1b-48b9-bb6b-53d91a63e5b9', '_kernel_device': '/dev/dm-6', '_raw_kernel_device': '/dev/dm-6'})

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Saturday 02 November 2024  18:55:09 -0400 (0:00:00.082)       0:01:40.311 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_volume_present": true,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Saturday 02 November 2024  18:55:09 -0400 (0:00:00.097)       0:01:40.408 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node2 => (item=mount)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node2 => (item=fstab)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node2 => (item=fs)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node2 => (item=device)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node2 => (item=encryption)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node2 => (item=md)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node2 => (item=size)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node2 => (item=cache)
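
The eight includes above are driven by the _storage_volume_tests list set two tasks earlier; under that assumption, the dispatch looks roughly like:

    - name: Run test verify for {{ storage_test_volume_subset }}
      include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
      loop: "{{ _storage_volume_tests }}"
      loop_control:
        loop_var: storage_test_volume_subset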

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Saturday 02 November 2024  18:55:09 -0400 (0:00:00.218)       0:01:40.627 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/stratis/foo/test1"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Saturday 02 November 2024  18:55:09 -0400 (0:00:00.064)       0:01:40.691 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "/opt/test1",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Saturday 02 November 2024  18:55:09 -0400 (0:00:00.066)       0:01:40.758 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Saturday 02 November 2024  18:55:09 -0400 (0:00:00.087)       0:01:40.845 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed
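
One way such a check can be expressed is against the gathered mount facts; using ansible_facts.mounts here is an assumption not confirmed by the log, though both test variables appear in the tasks above:

    - name: Verify the current mount state by device
      assert:
        that:
          - ansible_facts.mounts | selectattr('device', 'equalto', storage_test_device_path)
            | selectattr('mount', 'equalto', storage_test_mount_expected_mount_point)
            | list | length == 1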

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Saturday 02 November 2024  18:55:09 -0400 (0:00:00.048)       0:01:40.893 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Saturday 02 November 2024  18:55:10 -0400 (0:00:00.054)       0:01:40.947 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Saturday 02 November 2024  18:55:10 -0400 (0:00:00.084)       0:01:41.032 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode",
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Saturday 02 November 2024  18:55:10 -0400 (0:00:00.051)       0:01:41.083 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Saturday 02 November 2024  18:55:10 -0400 (0:00:00.049)       0:01:41.132 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Saturday 02 November 2024  18:55:10 -0400 (0:00:00.048)       0:01:41.181 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Saturday 02 November 2024  18:55:10 -0400 (0:00:00.048)       0:01:41.229 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Saturday 02 November 2024  18:55:10 -0400 (0:00:00.072)       0:01:41.302 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "1",
        "storage_test_fstab_expected_mount_options_matches": "1",
        "storage_test_fstab_expected_mount_point_matches": "1",
        "storage_test_fstab_id_matches": [
            "UUID=b062ee93-ef86-4262-a663-741cc15318f4 "
        ],
        "storage_test_fstab_mount_options_matches": [
            " /opt/test1 xfs defaults "
        ],
        "storage_test_fstab_mount_point_matches": [
            " /opt/test1 "
        ]
    },
    "changed": false
}

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Saturday 02 November 2024  18:55:10 -0400 (0:00:00.102)       0:01:41.404 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed
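
Given the expected/actual match variables set above, the fstab checks reduce to count comparisons; a sketch:

    - name: Verify that the device identifier appears in /etc/fstab
      assert:
        that:
          - storage_test_fstab_id_matches | length == storage_test_fstab_expected_id_matches | int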

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Saturday 02 November 2024  18:55:10 -0400 (0:00:00.105)       0:01:41.510 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Saturday 02 November 2024  18:55:10 -0400 (0:00:00.075)       0:01:41.586 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__storage_verify_mount_options | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Saturday 02 November 2024  18:55:10 -0400 (0:00:00.075)       0:01:41.662 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:51
Saturday 02 November 2024  18:55:10 -0400 (0:00:00.064)       0:01:41.727 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Saturday 02 November 2024  18:55:10 -0400 (0:00:00.154)       0:01:41.881 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type != \"stratis\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Saturday 02 November 2024  18:55:11 -0400 (0:00:00.063)       0:01:41.945 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type != \"stratis\"",
    "skip_reason": "Conditional result was False"
}

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Saturday 02 November 2024  18:55:11 -0400 (0:00:00.070)       0:01:42.015 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730588038.3049679,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "ctime": 1730588038.3049679,
        "dev": 5,
        "device_type": 64773,
        "executable": false,
        "exists": true,
        "gid": 6,
        "gr_name": "disk",
        "inode": 5701,
        "isblk": true,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": false,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/symlink",
        "mode": "0660",
        "mtime": 1730588038.3049679,
        "nlink": 1,
        "path": "/dev/stratis/foo/test1",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": null,
        "wgrp": true,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
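
A stat result like the one above (a block device reached through the /dev/stratis symlink path) could come from a task along these lines; follow: true and the register name are assumptions:

    - name: See whether the device node is present
      stat:
        path: "{{ storage_test_volume._device }}"
        follow: true  # resolve the symlink so isblk reflects the target
      register: storage_test_dev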

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Saturday 02 November 2024  18:55:11 -0400 (0:00:00.387)       0:01:42.402 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Saturday 02 November 2024  18:55:11 -0400 (0:00:00.045)       0:01:42.448 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Saturday 02 November 2024  18:55:11 -0400 (0:00:00.038)       0:01:42.486 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Saturday 02 November 2024  18:55:11 -0400 (0:00:00.044)       0:01:42.531 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "st_volume_type": "stratis"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Saturday 02 November 2024  18:55:11 -0400 (0:00:00.042)       0:01:42.573 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"raid\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Saturday 02 November 2024  18:55:11 -0400 (0:00:00.038)       0:01:42.612 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Saturday 02 November 2024  18:55:11 -0400 (0:00:00.048)       0:01:42.660 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Saturday 02 November 2024  18:55:11 -0400 (0:00:00.057)       0:01:42.718 ***** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: cryptsetup
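
The "Nothing to do" result above is the package manager reporting that cryptsetup is already installed; a minimal equivalent task:

    - name: Ensure cryptsetup is present
      package:
        name: cryptsetup
        state: present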

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Saturday 02 November 2024  18:55:13 -0400 (0:00:01.356)       0:01:44.074 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Saturday 02 November 2024  18:55:13 -0400 (0:00:00.053)       0:01:44.127 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Saturday 02 November 2024  18:55:13 -0400 (0:00:00.066)       0:01:44.194 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Saturday 02 November 2024  18:55:13 -0400 (0:00:00.078)       0:01:44.272 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Saturday 02 November 2024  18:55:13 -0400 (0:00:00.128)       0:01:44.401 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Saturday 02 November 2024  18:55:13 -0400 (0:00:00.066)       0:01:44.467 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Saturday 02 November 2024  18:55:13 -0400 (0:00:00.064)       0:01:44.532 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Saturday 02 November 2024  18:55:13 -0400 (0:00:00.069)       0:01:44.602 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Saturday 02 November 2024  18:55:13 -0400 (0:00:00.095)       0:01:44.697 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Saturday 02 November 2024  18:55:13 -0400 (0:00:00.150)       0:01:44.847 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed
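
With the crypttab variables set above (an empty entry list and an expected count of 0 for this unencrypted volume), the check reduces to a length comparison; a sketch of one plausible form:

    - name: Check for /etc/crypttab entry
      assert:
        that:
          - _storage_test_crypttab_entries | length == _storage_test_expected_crypttab_entries | int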

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Saturday 02 November 2024  18:55:14 -0400 (0:00:00.085)       0:01:44.933 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Saturday 02 November 2024  18:55:14 -0400 (0:00:00.092)       0:01:45.025 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Saturday 02 November 2024  18:55:14 -0400 (0:00:00.061)       0:01:45.086 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Saturday 02 November 2024  18:55:14 -0400 (0:00:00.061)       0:01:45.148 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Saturday 02 November 2024  18:55:14 -0400 (0:00:00.045)       0:01:45.193 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Saturday 02 November 2024  18:55:14 -0400 (0:00:00.062)       0:01:45.256 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Saturday 02 November 2024  18:55:14 -0400 (0:00:00.065)       0:01:45.321 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Saturday 02 November 2024  18:55:14 -0400 (0:00:00.065)       0:01:45.387 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Saturday 02 November 2024  18:55:14 -0400 (0:00:00.061)       0:01:45.448 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Saturday 02 November 2024  18:55:14 -0400 (0:00:00.151)       0:01:45.600 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Saturday 02 November 2024  18:55:14 -0400 (0:00:00.074)       0:01:45.674 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Saturday 02 November 2024  18:55:14 -0400 (0:00:00.058)       0:01:45.733 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Saturday 02 November 2024  18:55:14 -0400 (0:00:00.061)       0:01:45.794 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Saturday 02 November 2024  18:55:14 -0400 (0:00:00.053)       0:01:45.848 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Saturday 02 November 2024  18:55:14 -0400 (0:00:00.042)       0:01:45.891 ***** 
ok: [managed-node2] => {
    "bytes": 4294967296,
    "changed": false,
    "lvm": "4g",
    "parted": "4GiB",
    "size": "4 GiB"
}
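
The bytes/lvm/parted/size fields suggest a size-normalizing helper module shipped with the test suite. With stock Ansible, the byte value alone could be approximated with the human_to_bytes filter; a sketch using the volume's requested size:

    - name: Parse the actual size of the volume
      set_fact:
        storage_test_actual_bytes: "{{ storage_test_volume.size | human_to_bytes }}"  # '4g' -> 4294967296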

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Saturday 02 November 2024  18:55:15 -0400 (0:00:00.441)       0:01:46.332 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Saturday 02 November 2024  18:55:15 -0400 (0:00:00.103)       0:01:46.435 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Saturday 02 November 2024  18:55:15 -0400 (0:00:00.101)       0:01:46.536 ***** 
ok: [managed-node2] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}
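
The "VARIABLE IS NOT DEFINED!" output is expected here: storage_test_expected_size is only set on the LVM branches, all of which are skipped for a Stratis volume. If the noise is unwanted, the debug could fall back to a default; a sketch:

    - name: Show expected size
      debug:
        msg: "{{ storage_test_expected_size | d('not set for non-LVM volumes') }}"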

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Saturday 02 November 2024  18:55:15 -0400 (0:00:00.071)       0:01:46.608 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Saturday 02 November 2024  18:55:15 -0400 (0:00:00.096)       0:01:46.704 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Saturday 02 November 2024  18:55:15 -0400 (0:00:00.100)       0:01:46.805 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Saturday 02 November 2024  18:55:15 -0400 (0:00:00.069)       0:01:46.874 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Saturday 02 November 2024  18:55:16 -0400 (0:00:00.062)       0:01:46.937 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Saturday 02 November 2024  18:55:16 -0400 (0:00:00.064)       0:01:47.001 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Saturday 02 November 2024  18:55:16 -0400 (0:00:00.116)       0:01:47.117 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Saturday 02 November 2024  18:55:16 -0400 (0:00:00.063)       0:01:47.181 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Saturday 02 November 2024  18:55:16 -0400 (0:00:00.062)       0:01:47.244 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Saturday 02 November 2024  18:55:16 -0400 (0:00:00.077)       0:01:47.321 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Saturday 02 November 2024  18:55:16 -0400 (0:00:00.066)       0:01:47.388 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Saturday 02 November 2024  18:55:16 -0400 (0:00:00.111)       0:01:47.500 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Saturday 02 November 2024  18:55:16 -0400 (0:00:00.084)       0:01:47.584 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Saturday 02 November 2024  18:55:16 -0400 (0:00:00.064)       0:01:47.649 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Saturday 02 November 2024  18:55:16 -0400 (0:00:00.065)       0:01:47.715 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Saturday 02 November 2024  18:55:16 -0400 (0:00:00.052)       0:01:47.767 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Saturday 02 November 2024  18:55:16 -0400 (0:00:00.050)       0:01:47.817 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Saturday 02 November 2024  18:55:16 -0400 (0:00:00.041)       0:01:47.859 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Saturday 02 November 2024  18:55:16 -0400 (0:00:00.037)       0:01:47.897 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Saturday 02 November 2024  18:55:17 -0400 (0:00:00.049)       0:01:47.947 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Saturday 02 November 2024  18:55:17 -0400 (0:00:00.117)       0:01:48.064 ***** 
ok: [managed-node2] => {
    "storage_test_actual_size": {
        "bytes": 4294967296,
        "changed": false,
        "failed": false,
        "lvm": "4g",
        "parted": "4GiB",
        "size": "4 GiB"
    }
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Saturday 02 November 2024  18:55:17 -0400 (0:00:00.075)       0:01:48.140 ***** 
ok: [managed-node2] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}
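
A side note on the "Show expected size" results in this run: ansible.builtin.debug does not fail on an undefined variable; it prints the "VARIABLE IS NOT DEFINED!" placeholder instead. storage_test_expected_size is only set in the LVM and thin-pool branches, all of which were skipped for this stratis volume, so the placeholder is expected. The task is presumably nothing more than:

- name: Show expected size
  debug:
    var: storage_test_expected_size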

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Saturday 02 November 2024  18:55:17 -0400 (0:00:00.082)       0:01:48.223 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Saturday 02 November 2024  18:55:17 -0400 (0:00:00.094)       0:01:48.317 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Saturday 02 November 2024  18:55:17 -0400 (0:00:00.092)       0:01:48.410 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Saturday 02 November 2024  18:55:17 -0400 (0:00:00.077)       0:01:48.488 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Saturday 02 November 2024  18:55:17 -0400 (0:00:00.073)       0:01:48.562 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Saturday 02 November 2024  18:55:17 -0400 (0:00:00.087)       0:01:48.649 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Saturday 02 November 2024  18:55:17 -0400 (0:00:00.065)       0:01:48.715 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Saturday 02 November 2024  18:55:17 -0400 (0:00:00.052)       0:01:48.767 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Saturday 02 November 2024  18:55:17 -0400 (0:00:00.044)       0:01:48.811 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Saturday 02 November 2024  18:55:17 -0400 (0:00:00.051)       0:01:48.863 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_volume_present": true,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Saturday 02 November 2024  18:55:18 -0400 (0:00:00.103)       0:01:48.967 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node2 => (item=mount)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node2 => (item=fstab)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node2 => (item=fs)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node2 => (item=device)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node2 => (item=encryption)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node2 => (item=md)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node2 => (item=size)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node2 => (item=cache)
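
The eight files above are pulled in by a single loop rather than eight separate tasks; the unrendered {{ storage_test_volume_subset }} in the task banner is the giveaway, since loop variables are only templated per item. A minimal sketch of what test-verify-volume.yml:19 presumably contains, with names taken from the output above rather than verified against the source:

- name: Run test verify for {{ storage_test_volume_subset }}
  include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
  loop: "{{ _storage_volume_tests }}"
  loop_control:
    loop_var: storage_test_volume_subset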

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Saturday 02 November 2024  18:55:18 -0400 (0:00:00.354)       0:01:49.321 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/stratis/foo/test2"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Saturday 02 November 2024  18:55:18 -0400 (0:00:00.084)       0:01:49.406 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "/opt/test2",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Saturday 02 November 2024  18:55:18 -0400 (0:00:00.124)       0:01:49.531 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Saturday 02 November 2024  18:55:18 -0400 (0:00:00.066)       0:01:49.597 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Saturday 02 November 2024  18:55:18 -0400 (0:00:00.131)       0:01:49.728 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Saturday 02 November 2024  18:55:18 -0400 (0:00:00.077)       0:01:49.805 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Saturday 02 November 2024  18:55:18 -0400 (0:00:00.062)       0:01:49.868 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode",
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Saturday 02 November 2024  18:55:19 -0400 (0:00:00.089)       0:01:49.957 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Saturday 02 November 2024  18:55:19 -0400 (0:00:00.090)       0:01:50.048 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Saturday 02 November 2024  18:55:19 -0400 (0:00:00.066)       0:01:50.115 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Saturday 02 November 2024  18:55:19 -0400 (0:00:00.078)       0:01:50.194 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Saturday 02 November 2024  18:55:19 -0400 (0:00:00.070)       0:01:50.265 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "1",
        "storage_test_fstab_expected_mount_options_matches": "1",
        "storage_test_fstab_expected_mount_point_matches": "1",
        "storage_test_fstab_id_matches": [
            "UUID=ba08c36b-5f1b-48b9-bb6b-53d91a63e5b9 "
        ],
        "storage_test_fstab_mount_options_matches": [
            " /opt/test2 xfs defaults "
        ],
        "storage_test_fstab_mount_point_matches": [
            " /opt/test2 "
        ]
    },
    "changed": false
}
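
The three *_matches lists above carry the leading/trailing spaces typical of regex_findall captures against the raw fstab content, and the checks that follow simply compare each list's length against the expected count. A hedged sketch of the first assertion (the exact filter chain in the test source may differ):

- name: Verify that the device identifier appears in /etc/fstab
  assert:
    that:
      - storage_test_fstab_id_matches | length == storage_test_fstab_expected_id_matches | int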

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Saturday 02 November 2024  18:55:19 -0400 (0:00:00.181)       0:01:50.446 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Saturday 02 November 2024  18:55:19 -0400 (0:00:00.163)       0:01:50.610 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Saturday 02 November 2024  18:55:19 -0400 (0:00:00.160)       0:01:50.771 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__storage_verify_mount_options | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Saturday 02 November 2024  18:55:20 -0400 (0:00:00.226)       0:01:50.997 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:51
Saturday 02 November 2024  18:55:20 -0400 (0:00:00.109)       0:01:51.107 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Saturday 02 November 2024  18:55:20 -0400 (0:00:00.066)       0:01:51.173 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type != \"stratis\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Saturday 02 November 2024  18:55:20 -0400 (0:00:00.069)       0:01:51.242 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type != \"stratis\"",
    "skip_reason": "Conditional result was False"
}

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Saturday 02 November 2024  18:55:20 -0400 (0:00:00.077)       0:01:51.319 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730588095.6805792,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "ctime": 1730588095.6805792,
        "dev": 5,
        "device_type": 64774,
        "executable": false,
        "exists": true,
        "gid": 6,
        "gr_name": "disk",
        "inode": 5742,
        "isblk": true,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": false,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/symlink",
        "mode": "0660",
        "mtime": 1730588095.6805792,
        "nlink": 1,
        "path": "/dev/stratis/foo/test2",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": null,
        "wgrp": true,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
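
Note the apparent contradiction in this stat result: mimetype reports inode/symlink while isblk is true. /dev/stratis/foo/test2 is a symlink to the underlying device-mapper node; the stat syscall followed the link (hence the block-device flags) while the mime probe did not. That matches a probe along these lines, assuming the volume's resolved device path is carried in storage_test_volume._device:

- name: See whether the device node is present
  stat:
    path: "{{ storage_test_volume._device }}"
    follow: true
  register: storage_test_dev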

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Saturday 02 November 2024  18:55:20 -0400 (0:00:00.503)       0:01:51.823 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Saturday 02 November 2024  18:55:21 -0400 (0:00:00.148)       0:01:51.972 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Saturday 02 November 2024  18:55:21 -0400 (0:00:00.133)       0:01:52.106 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Saturday 02 November 2024  18:55:21 -0400 (0:00:00.129)       0:01:52.235 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "st_volume_type": "stratis"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Saturday 02 November 2024  18:55:21 -0400 (0:00:00.122)       0:01:52.358 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"raid\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Saturday 02 November 2024  18:55:21 -0400 (0:00:00.117)       0:01:52.475 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Saturday 02 November 2024  18:55:21 -0400 (0:00:00.120)       0:01:52.596 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Saturday 02 November 2024  18:55:21 -0400 (0:00:00.115)       0:01:52.711 ***** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: cryptsetup

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Saturday 02 November 2024  18:55:23 -0400 (0:00:01.473)       0:01:54.185 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Saturday 02 November 2024  18:55:23 -0400 (0:00:00.112)       0:01:54.298 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Saturday 02 November 2024  18:55:23 -0400 (0:00:00.180)       0:01:54.478 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Saturday 02 November 2024  18:55:23 -0400 (0:00:00.161)       0:01:54.640 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Saturday 02 November 2024  18:55:23 -0400 (0:00:00.096)       0:01:54.737 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Saturday 02 November 2024  18:55:23 -0400 (0:00:00.103)       0:01:54.841 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Saturday 02 November 2024  18:55:24 -0400 (0:00:00.092)       0:01:54.934 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Saturday 02 November 2024  18:55:24 -0400 (0:00:00.088)       0:01:55.022 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Saturday 02 November 2024  18:55:24 -0400 (0:00:00.091)       0:01:55.114 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Saturday 02 November 2024  18:55:24 -0400 (0:00:00.155)       0:01:55.269 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Saturday 02 November 2024  18:55:24 -0400 (0:00:00.160)       0:01:55.429 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Saturday 02 November 2024  18:55:24 -0400 (0:00:00.182)       0:01:55.612 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Saturday 02 November 2024  18:55:24 -0400 (0:00:00.161)       0:01:55.773 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Saturday 02 November 2024  18:55:25 -0400 (0:00:00.195)       0:01:55.968 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Saturday 02 November 2024  18:55:25 -0400 (0:00:00.074)       0:01:56.043 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Saturday 02 November 2024  18:55:25 -0400 (0:00:00.079)       0:01:56.122 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Saturday 02 November 2024  18:55:25 -0400 (0:00:00.125)       0:01:56.247 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Saturday 02 November 2024  18:55:25 -0400 (0:00:00.254)       0:01:56.501 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Saturday 02 November 2024  18:55:25 -0400 (0:00:00.114)       0:01:56.615 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Saturday 02 November 2024  18:55:25 -0400 (0:00:00.118)       0:01:56.734 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Saturday 02 November 2024  18:55:25 -0400 (0:00:00.121)       0:01:56.856 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Saturday 02 November 2024  18:55:26 -0400 (0:00:00.122)       0:01:56.979 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Saturday 02 November 2024  18:55:26 -0400 (0:00:00.135)       0:01:57.115 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Saturday 02 November 2024  18:55:26 -0400 (0:00:00.115)       0:01:57.230 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Saturday 02 November 2024  18:55:26 -0400 (0:00:00.084)       0:01:57.315 ***** 
ok: [managed-node2] => {
    "bytes": 4294967296,
    "changed": false,
    "lvm": "4g",
    "parted": "4GiB",
    "size": "4 GiB"
}

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Saturday 02 November 2024  18:55:26 -0400 (0:00:00.517)       0:01:57.833 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Saturday 02 November 2024  18:55:27 -0400 (0:00:00.116)       0:01:57.949 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Saturday 02 November 2024  18:55:27 -0400 (0:00:00.108)       0:01:58.058 ***** 
ok: [managed-node2] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Saturday 02 November 2024  18:55:27 -0400 (0:00:00.109)       0:01:58.167 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Saturday 02 November 2024  18:55:27 -0400 (0:00:00.151)       0:01:58.318 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Saturday 02 November 2024  18:55:27 -0400 (0:00:00.129)       0:01:58.447 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Saturday 02 November 2024  18:55:27 -0400 (0:00:00.170)       0:01:58.618 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Saturday 02 November 2024  18:55:27 -0400 (0:00:00.094)       0:01:58.712 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Saturday 02 November 2024  18:55:27 -0400 (0:00:00.103)       0:01:58.816 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Saturday 02 November 2024  18:55:27 -0400 (0:00:00.064)       0:01:58.880 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Saturday 02 November 2024  18:55:28 -0400 (0:00:00.062)       0:01:58.943 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Saturday 02 November 2024  18:55:28 -0400 (0:00:00.063)       0:01:59.007 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Saturday 02 November 2024  18:55:28 -0400 (0:00:00.070)       0:01:59.078 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Saturday 02 November 2024  18:55:28 -0400 (0:00:00.066)       0:01:59.144 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Saturday 02 November 2024  18:55:28 -0400 (0:00:00.066)       0:01:59.211 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Saturday 02 November 2024  18:55:28 -0400 (0:00:00.061)       0:01:59.272 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Saturday 02 November 2024  18:55:28 -0400 (0:00:00.063)       0:01:59.336 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Saturday 02 November 2024  18:55:28 -0400 (0:00:00.062)       0:01:59.399 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Saturday 02 November 2024  18:55:28 -0400 (0:00:00.062)       0:01:59.461 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Saturday 02 November 2024  18:55:28 -0400 (0:00:00.064)       0:01:59.526 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Saturday 02 November 2024  18:55:28 -0400 (0:00:00.065)       0:01:59.592 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Saturday 02 November 2024  18:55:28 -0400 (0:00:00.224)       0:01:59.816 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Saturday 02 November 2024  18:55:28 -0400 (0:00:00.073)       0:01:59.889 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Saturday 02 November 2024  18:55:29 -0400 (0:00:00.111)       0:02:00.001 ***** 
ok: [managed-node2] => {
    "storage_test_actual_size": {
        "bytes": 4294967296,
        "changed": false,
        "failed": false,
        "lvm": "4g",
        "parted": "4GiB",
        "size": "4 GiB"
    }
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Saturday 02 November 2024  18:55:29 -0400 (0:00:00.138)       0:02:00.139 ***** 
ok: [managed-node2] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Saturday 02 November 2024  18:55:29 -0400 (0:00:00.096)       0:02:00.236 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Saturday 02 November 2024  18:55:29 -0400 (0:00:00.176)       0:02:00.413 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Saturday 02 November 2024  18:55:29 -0400 (0:00:00.067)       0:02:00.481 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Saturday 02 November 2024  18:55:29 -0400 (0:00:00.095)       0:02:00.576 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Saturday 02 November 2024  18:55:29 -0400 (0:00:00.089)       0:02:00.665 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Saturday 02 November 2024  18:55:29 -0400 (0:00:00.082)       0:02:00.748 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Saturday 02 November 2024  18:55:29 -0400 (0:00:00.089)       0:02:00.838 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Saturday 02 November 2024  18:55:30 -0400 (0:00:00.101)       0:02:00.939 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Saturday 02 November 2024  18:55:30 -0400 (0:00:00.147)       0:02:01.086 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Saturday 02 November 2024  18:55:30 -0400 (0:00:00.101)       0:02:01.188 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Saturday 02 November 2024  18:55:30 -0400 (0:00:00.120)       0:02:01.309 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Clean up] ****************************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:111
Saturday 02 November 2024  18:55:30 -0400 (0:00:00.206)       0:02:01.515 ***** 
included: fedora.linux_system_roles.storage for managed-node2

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Saturday 02 November 2024  18:55:30 -0400 (0:00:00.168)       0:02:01.684 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Saturday 02 November 2024  18:55:30 -0400 (0:00:00.081)       0:02:01.765 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Saturday 02 November 2024  18:55:30 -0400 (0:00:00.099)       0:02:01.865 ***** 
skipping: [managed-node2] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node2] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}
ok: [managed-node2] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}
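
CentOS_9.yml is legitimately processed twice here: on CentOS Stream 9 the distribution's major version and full version both render as "9", so two of the candidate vars files resolve to the same name. The lookup loop presumably resembles the following, a sketch consistent with the "__vars_file is file" skip condition above rather than the role's literal source:

- name: Set platform/version specific variables
  include_vars: "{{ __vars_file }}"
  vars:
    __vars_file: "{{ role_path }}/vars/{{ item }}"
  loop:
    - "{{ ansible_facts['os_family'] }}.yml"
    - "{{ ansible_facts['distribution'] }}.yml"
    - "{{ ansible_facts['distribution'] }}_{{ ansible_facts['distribution_major_version'] }}.yml"
    - "{{ ansible_facts['distribution'] }}_{{ ansible_facts['distribution_version'] }}.yml"
  when: __vars_file is file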

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Saturday 02 November 2024  18:55:31 -0400 (0:00:00.117)       0:02:01.982 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Saturday 02 November 2024  18:55:31 -0400 (0:00:00.090)       0:02:02.073 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Saturday 02 November 2024  18:55:31 -0400 (0:00:00.072)       0:02:02.145 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Saturday 02 November 2024  18:55:31 -0400 (0:00:00.070)       0:02:02.216 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Saturday 02 November 2024  18:55:31 -0400 (0:00:00.087)       0:02:02.303 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Saturday 02 November 2024  18:55:31 -0400 (0:00:00.216)       0:02:02.520 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Saturday 02 November 2024  18:55:31 -0400 (0:00:00.115)       0:02:02.635 ***** 
ok: [managed-node2] => {
    "storage_pools": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "name": "foo",
            "state": "absent",
            "type": "stratis",
            "volumes": [
                {
                    "mount_point": "/opt/test1",
                    "name": "test1",
                    "size": "4g",
                    "state": "absent"
                },
                {
                    "mount_point": "/opt/test2",
                    "name": "test2",
                    "size": "4g",
                    "state": "absent"
                }
            ]
        }
    ]
}
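
This storage_pools value is the cleanup invocation: the pool and both volumes from the earlier test phases are re-declared with state: absent so the role tears them down. The call at tests_stratis.yml:111 plausibly corresponds to something like the sketch below; the unused_disks variable is an assumption, resolving to the literal sda through sdi list shown in the log:

- name: Clean up
  include_role:
    name: fedora.linux_system_roles.storage
  vars:
    storage_pools:
      - name: foo
        type: stratis
        disks: "{{ unused_disks }}"  # assumed variable; sda..sdi in this run
        state: absent
        volumes:
          - name: test1
            size: 4g
            mount_point: /opt/test1
            state: absent
          - name: test2
            size: 4g
            mount_point: /opt/test2
            state: absent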

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Saturday 02 November 2024  18:55:31 -0400 (0:00:00.172)       0:02:02.808 ***** 
ok: [managed-node2] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}
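
For readers reproducing this step: the storage_pools value logged above corresponds to a role invocation along the following lines. This is a minimal sketch reconstructed from the logged variable, not the literal tests_stratis.yml source; the play and task names are illustrative.

---
- hosts: managed-node2
  tasks:
    - name: Remove the stratis pool and both of its filesystems
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            type: stratis
            state: absent
            disks: [sda, sdb, sdc, sdd, sde, sdf, sdg, sdh, sdi]
            volumes:
              - name: test1
                size: 4g
                mount_point: /opt/test1
                state: absent
              - name: test2
                size: 4g
                mount_point: /opt/test2
                state: absent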

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Saturday 02 November 2024  18:55:31 -0400 (0:00:00.059)       0:02:02.867 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Saturday 02 November 2024  18:55:32 -0400 (0:00:00.056)       0:02:02.923 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Saturday 02 November 2024  18:55:32 -0400 (0:00:00.064)       0:02:02.988 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Saturday 02 November 2024  18:55:32 -0400 (0:00:00.088)       0:02:03.076 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}
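
The skipped checks above share one gate: the caller pre-populates storage_skip_checks so the role does not re-verify blivet, packages, or service facts on every invocation. Based on the false_condition strings in these results, a caller opting into the same shortcuts would set something like:

storage_skip_checks:
  - blivet_available
  - packages_installed
  - service_facts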

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Saturday 02 November 2024  18:55:32 -0400 (0:00:00.073)       0:02:03.150 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Saturday 02 November 2024  18:55:32 -0400 (0:00:00.131)       0:02:03.281 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Saturday 02 November 2024  18:55:32 -0400 (0:00:00.061)       0:02:03.343 ***** 
changed: [managed-node2] => {
    "actions": [
        {
            "action": "destroy format",
            "device": "/dev/stratis/foo/test2",
            "fs_type": "stratis xfs"
        },
        {
            "action": "destroy device",
            "device": "/dev/stratis/foo/test2",
            "fs_type": null
        },
        {
            "action": "destroy format",
            "device": "/dev/stratis/foo/test1",
            "fs_type": "stratis xfs"
        },
        {
            "action": "destroy device",
            "device": "/dev/stratis/foo/test1",
            "fs_type": null
        },
        {
            "action": "destroy device",
            "device": "/dev/stratis/foo",
            "fs_type": null
        },
        {
            "action": "destroy format",
            "device": "/dev/sde",
            "fs_type": "stratis"
        },
        {
            "action": "destroy format",
            "device": "/dev/sdd",
            "fs_type": "stratis"
        },
        {
            "action": "destroy format",
            "device": "/dev/sdf",
            "fs_type": "stratis"
        },
        {
            "action": "destroy format",
            "device": "/dev/sdg",
            "fs_type": "stratis"
        },
        {
            "action": "destroy format",
            "device": "/dev/sdi",
            "fs_type": "stratis"
        },
        {
            "action": "destroy format",
            "device": "/dev/sdc",
            "fs_type": "stratis"
        },
        {
            "action": "destroy format",
            "device": "/dev/sdh",
            "fs_type": "stratis"
        },
        {
            "action": "destroy format",
            "device": "/dev/sda",
            "fs_type": "stratis"
        },
        {
            "action": "destroy format",
            "device": "/dev/sdb",
            "fs_type": "stratis"
        }
    ],
    "changed": true,
    "crypts": [],
    "leaves": [
        "/dev/sda",
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1"
    ],
    "mounts": [
        {
            "fstype": "xfs",
            "path": "/opt/test2",
            "src": "UUID=ba08c36b-5f1b-48b9-bb6b-53d91a63e5b9",
            "state": "absent"
        },
        {
            "fstype": "xfs",
            "path": "/opt/test1",
            "src": "UUID=b062ee93-ef86-4262-a663-741cc15318f4",
            "state": "absent"
        }
    ],
    "packages": [
        "xfsprogs"
    ],
    "pools": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "absent",
            "type": "stratis",
            "volumes": [
                {
                    "_device": "/dev/stratis/foo/test1",
                    "_mount_id": "UUID=b062ee93-ef86-4262-a663-741cc15318f4",
                    "_raw_device": "/dev/stratis/foo/test1",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda",
                        "sdb",
                        "sdc",
                        "sdd",
                        "sde",
                        "sdf",
                        "sdg",
                        "sdh",
                        "sdi"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "absent",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "stratis",
                    "vdo_pool_size": null
                },
                {
                    "_device": "/dev/stratis/foo/test2",
                    "_mount_id": "UUID=ba08c36b-5f1b-48b9-bb6b-53d91a63e5b9",
                    "_raw_device": "/dev/stratis/foo/test2",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda",
                        "sdb",
                        "sdc",
                        "sdd",
                        "sde",
                        "sdf",
                        "sdg",
                        "sdh",
                        "sdi"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test2",
                    "mount_user": null,
                    "name": "test2",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "absent",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "stratis",
                    "vdo_pool_size": null
                }
            ]
        }
    ],
    "volumes": []
}

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Saturday 02 November 2024  18:55:45 -0400 (0:00:12.863)       0:02:16.207 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Saturday 02 November 2024  18:55:45 -0400 (0:00:00.108)       0:02:16.316 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730588099.2536173,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "d75c1d7f5e42d2f69007d8b2d7e65f57b27dbfa1",
        "ctime": 1730588099.2506173,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 415236291,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1730588099.2506173,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1506,
        "uid": 0,
        "version": "3857707169",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Saturday 02 November 2024  18:55:45 -0400 (0:00:00.487)       0:02:16.803 ***** 
ok: [managed-node2] => {
    "backup": "",
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Saturday 02 November 2024  18:55:46 -0400 (0:00:00.509)       0:02:17.313 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Saturday 02 November 2024  18:55:46 -0400 (0:00:00.070)       0:02:17.384 ***** 
ok: [managed-node2] => {
    "blivet_output": {
        "actions": [
            {
                "action": "destroy format",
                "device": "/dev/stratis/foo/test2",
                "fs_type": "stratis xfs"
            },
            {
                "action": "destroy device",
                "device": "/dev/stratis/foo/test2",
                "fs_type": null
            },
            {
                "action": "destroy format",
                "device": "/dev/stratis/foo/test1",
                "fs_type": "stratis xfs"
            },
            {
                "action": "destroy device",
                "device": "/dev/stratis/foo/test1",
                "fs_type": null
            },
            {
                "action": "destroy device",
                "device": "/dev/stratis/foo",
                "fs_type": null
            },
            {
                "action": "destroy format",
                "device": "/dev/sde",
                "fs_type": "stratis"
            },
            {
                "action": "destroy format",
                "device": "/dev/sdd",
                "fs_type": "stratis"
            },
            {
                "action": "destroy format",
                "device": "/dev/sdf",
                "fs_type": "stratis"
            },
            {
                "action": "destroy format",
                "device": "/dev/sdg",
                "fs_type": "stratis"
            },
            {
                "action": "destroy format",
                "device": "/dev/sdi",
                "fs_type": "stratis"
            },
            {
                "action": "destroy format",
                "device": "/dev/sdc",
                "fs_type": "stratis"
            },
            {
                "action": "destroy format",
                "device": "/dev/sdh",
                "fs_type": "stratis"
            },
            {
                "action": "destroy format",
                "device": "/dev/sda",
                "fs_type": "stratis"
            },
            {
                "action": "destroy format",
                "device": "/dev/sdb",
                "fs_type": "stratis"
            }
        ],
        "changed": true,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/sda",
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1"
        ],
        "mounts": [
            {
                "fstype": "xfs",
                "path": "/opt/test2",
                "src": "UUID=ba08c36b-5f1b-48b9-bb6b-53d91a63e5b9",
                "state": "absent"
            },
            {
                "fstype": "xfs",
                "path": "/opt/test1",
                "src": "UUID=b062ee93-ef86-4262-a663-741cc15318f4",
                "state": "absent"
            }
        ],
        "packages": [
            "xfsprogs"
        ],
        "pools": [
            {
                "disks": [
                    "sda",
                    "sdb",
                    "sdc",
                    "sdd",
                    "sde",
                    "sdf",
                    "sdg",
                    "sdh",
                    "sdi"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "absent",
                "type": "stratis",
                "volumes": [
                    {
                        "_device": "/dev/stratis/foo/test1",
                        "_mount_id": "UUID=b062ee93-ef86-4262-a663-741cc15318f4",
                        "_raw_device": "/dev/stratis/foo/test1",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda",
                            "sdb",
                            "sdc",
                            "sdd",
                            "sde",
                            "sdf",
                            "sdg",
                            "sdh",
                            "sdi"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "absent",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "stratis",
                        "vdo_pool_size": null
                    },
                    {
                        "_device": "/dev/stratis/foo/test2",
                        "_mount_id": "UUID=ba08c36b-5f1b-48b9-bb6b-53d91a63e5b9",
                        "_raw_device": "/dev/stratis/foo/test2",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda",
                            "sdb",
                            "sdc",
                            "sdd",
                            "sde",
                            "sdf",
                            "sdg",
                            "sdh",
                            "sdi"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test2",
                        "mount_user": null,
                        "name": "test2",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "absent",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "stratis",
                        "vdo_pool_size": null
                    }
                ]
            }
        ],
        "volumes": []
    }
}
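
Since blivet_output is visible to the play at this point, the expected teardown shape can be asserted directly. A hypothetical follow-up check, not part of the role, with the counts taken from the output above:

- name: Assert the stratis teardown reported the expected actions
  ansible.builtin.assert:
    that:
      # two filesystem devices plus the pool itself were destroyed
      - blivet_output.actions | selectattr('action', 'equalto', 'destroy device') | list | length == 3
      # both mount points were scheduled for removal from /etc/fstab
      - blivet_output.mounts | selectattr('state', 'equalto', 'absent') | list | length == 2
      - blivet_output is changed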

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Saturday 02 November 2024  18:55:46 -0400 (0:00:00.090)       0:02:17.475 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda",
                    "sdb",
                    "sdc",
                    "sdd",
                    "sde",
                    "sdf",
                    "sdg",
                    "sdh",
                    "sdi"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "absent",
                "type": "stratis",
                "volumes": [
                    {
                        "_device": "/dev/stratis/foo/test1",
                        "_mount_id": "UUID=b062ee93-ef86-4262-a663-741cc15318f4",
                        "_raw_device": "/dev/stratis/foo/test1",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda",
                            "sdb",
                            "sdc",
                            "sdd",
                            "sde",
                            "sdf",
                            "sdg",
                            "sdh",
                            "sdi"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "absent",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "stratis",
                        "vdo_pool_size": null
                    },
                    {
                        "_device": "/dev/stratis/foo/test2",
                        "_mount_id": "UUID=ba08c36b-5f1b-48b9-bb6b-53d91a63e5b9",
                        "_raw_device": "/dev/stratis/foo/test2",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda",
                            "sdb",
                            "sdc",
                            "sdd",
                            "sde",
                            "sdf",
                            "sdg",
                            "sdh",
                            "sdi"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test2",
                        "mount_user": null,
                        "name": "test2",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "absent",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "stratis",
                        "vdo_pool_size": null
                    }
                ]
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Saturday 02 November 2024  18:55:46 -0400 (0:00:00.175)       0:02:17.650 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Saturday 02 November 2024  18:55:46 -0400 (0:00:00.069)       0:02:17.720 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
changed: [managed-node2] => (item={'src': 'UUID=ba08c36b-5f1b-48b9-bb6b-53d91a63e5b9', 'path': '/opt/test2', 'state': 'absent', 'fstype': 'xfs'}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "xfs",
    "mount_info": {
        "fstype": "xfs",
        "path": "/opt/test2",
        "src": "UUID=ba08c36b-5f1b-48b9-bb6b-53d91a63e5b9",
        "state": "absent"
    },
    "name": "/opt/test2",
    "opts": "defaults",
    "passno": "0",
    "src": "UUID=ba08c36b-5f1b-48b9-bb6b-53d91a63e5b9"
}
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
changed: [managed-node2] => (item={'src': 'UUID=b062ee93-ef86-4262-a663-741cc15318f4', 'path': '/opt/test1', 'state': 'absent', 'fstype': 'xfs'}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "xfs",
    "mount_info": {
        "fstype": "xfs",
        "path": "/opt/test1",
        "src": "UUID=b062ee93-ef86-4262-a663-741cc15318f4",
        "state": "absent"
    },
    "name": "/opt/test1",
    "opts": "defaults",
    "passno": "0",
    "src": "UUID=b062ee93-ef86-4262-a663-741cc15318f4"
}
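
Each obsolete entry is removed through ansible.posix.mount, the module the redirect lines above resolve to. A sketch of the pattern, assuming only the loop variable name shown as ansible_loop_var in the results; this is not the role's verbatim task:

- name: Remove obsolete mounts
  ansible.posix.mount:
    src: "{{ mount_info['src'] }}"
    path: "{{ mount_info['path'] }}"
    fstype: "{{ mount_info['fstype'] }}"
    state: absent
  loop: "{{ blivet_output.mounts }}"
  loop_control:
    loop_var: mount_info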

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Saturday 02 November 2024  18:55:47 -0400 (0:00:00.859)       0:02:18.579 ***** 
ok: [managed-node2] => {
    "changed": false,
    "name": null,
    "status": {}
}
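
The null name and empty status in this result are what ansible.builtin.systemd returns for a bare daemon reload; the standalone equivalent would presumably be:

- name: Tell systemd to refresh its view of /etc/fstab
  ansible.builtin.systemd:
    daemon_reload: true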

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Saturday 02 November 2024  18:55:48 -0400 (0:00:00.790)       0:02:19.369 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Saturday 02 November 2024  18:55:48 -0400 (0:00:00.127)       0:02:19.496 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Saturday 02 November 2024  18:55:48 -0400 (0:00:00.107)       0:02:19.604 ***** 
ok: [managed-node2] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Saturday 02 November 2024  18:55:49 -0400 (0:00:00.776)       0:02:20.380 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730587881.7952998,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1730587879.2202723,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 104857860,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1730587879.2212098,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "3381617552",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Saturday 02 November 2024  18:55:49 -0400 (0:00:00.419)       0:02:20.799 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Saturday 02 November 2024  18:55:49 -0400 (0:00:00.037)       0:02:20.837 ***** 
ok: [managed-node2]

TASK [Verify role results] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:130
Saturday 02 November 2024  18:55:50 -0400 (0:00:00.962)       0:02:21.800 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node2

TASK [Print out pool information] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Saturday 02 November 2024  18:55:50 -0400 (0:00:00.081)       0:02:21.881 ***** 
ok: [managed-node2] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "absent",
            "type": "stratis",
            "volumes": [
                {
                    "_device": "/dev/stratis/foo/test1",
                    "_mount_id": "UUID=b062ee93-ef86-4262-a663-741cc15318f4",
                    "_raw_device": "/dev/stratis/foo/test1",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda",
                        "sdb",
                        "sdc",
                        "sdd",
                        "sde",
                        "sdf",
                        "sdg",
                        "sdh",
                        "sdi"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "absent",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "stratis",
                    "vdo_pool_size": null
                },
                {
                    "_device": "/dev/stratis/foo/test2",
                    "_mount_id": "UUID=ba08c36b-5f1b-48b9-bb6b-53d91a63e5b9",
                    "_raw_device": "/dev/stratis/foo/test2",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda",
                        "sdb",
                        "sdc",
                        "sdd",
                        "sde",
                        "sdf",
                        "sdg",
                        "sdh",
                        "sdi"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test2",
                    "mount_user": null,
                    "name": "test2",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "absent",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "stratis",
                    "vdo_pool_size": null
                }
            ]
        }
    ]
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Saturday 02 November 2024  18:55:51 -0400 (0:00:00.067)       0:02:21.949 ***** 
skipping: [managed-node2] => {
    "false_condition": "_storage_volumes_list | length > 0"
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Saturday 02 November 2024  18:55:51 -0400 (0:00:00.058)       0:02:22.007 ***** 
ok: [managed-node2] => {
    "changed": false,
    "info": {
        "/dev/sda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdb": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda1",
            "size": "250G",
            "type": "partition",
            "uuid": "d6f52a16-d77d-4c0d-9841-76b856654401"
        }
    }
}
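
The per-device fields above (fstype, label, mountpoint, size, type, uuid) can be reproduced outside the test with lsblk. A rough stand-in for the collection step, not the module the test actually uses:

- name: Collect block device info with lsblk
  ansible.builtin.command:
    cmd: lsblk --json -o NAME,FSTYPE,LABEL,MOUNTPOINT,SIZE,TYPE,UUID
  register: lsblk_info
  changed_when: false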

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Saturday 02 November 2024  18:55:51 -0400 (0:00:00.474)       0:02:22.481 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:01.004464",
    "end": "2024-11-02 18:55:52.921259",
    "rc": 0,
    "start": "2024-11-02 18:55:51.916795"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Wed Oct 30 09:04:39 2024
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=d6f52a16-d77d-4c0d-9841-76b856654401 /                       xfs     defaults        0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
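
With both role-managed lines gone, only the anaconda-created root entry and the pre-existing NFS mounts remain. A check equivalent to what this read implies could look like the following; the register name is assumed, since the log does not show it:

- name: Read the /etc/fstab file for volume existence
  ansible.builtin.command: cat /etc/fstab
  register: storage_test_fstab
  changed_when: false

- name: Assert the test mount points were removed
  ansible.builtin.assert:
    that:
      - "'/opt/test1' not in storage_test_fstab.stdout"
      - "'/opt/test2' not in storage_test_fstab.stdout"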

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Saturday 02 November 2024  18:55:53 -0400 (0:00:01.454)       0:02:23.936 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.003282",
    "end": "2024-11-02 18:55:53.434404",
    "failed_when_result": false,
    "rc": 0,
    "start": "2024-11-02 18:55:53.431122"
}

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Saturday 02 November 2024  18:55:53 -0400 (0:00:00.502)       0:02:24.439 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node2 => (item={'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'absent', 'type': 'stratis', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=b062ee93-ef86-4262-a663-741cc15318f4'}, {'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test2', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test2', '_raw_device': '/dev/stratis/foo/test2', '_mount_id': 'UUID=ba08c36b-5f1b-48b9-bb6b-53d91a63e5b9'}]})

TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Saturday 02 November 2024  18:55:53 -0400 (0:00:00.180)       0:02:24.619 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Saturday 02 November 2024  18:55:53 -0400 (0:00:00.067)       0:02:24.686 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Saturday 02 November 2024  18:55:53 -0400 (0:00:00.083)       0:02:24.770 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify pool subset] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34
Saturday 02 November 2024  18:55:53 -0400 (0:00:00.088)       0:02:24.859 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node2 => (item=members)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node2 => (item=volumes)
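
The two includes above come from iterating _storage_pool_tests (set to members and volumes a few tasks earlier). A plausible shape for that dispatch; only the file names and loop items are taken from the log, and the loop variable name is a guess:

- name: Verify pool subset
  ansible.builtin.include_tasks: "test-verify-pool-{{ storage_test_pool_subset }}.yml"
  loop: "{{ _storage_pool_tests }}"
  loop_control:
    loop_var: storage_test_pool_subset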

TASK [Set test variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2
Saturday 02 November 2024  18:55:54 -0400 (0:00:00.119)       0:02:24.979 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Get the canonical device path for each member device] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8
Saturday 02 November 2024  18:55:54 -0400 (0:00:00.051)       0:02:25.030 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Set pvs lvm length] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17
Saturday 02 November 2024  18:55:54 -0400 (0:00:00.040)       0:02:25.071 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set pool pvs] ************************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22
Saturday 02 November 2024  18:55:54 -0400 (0:00:00.036)       0:02:25.107 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify PV count] *********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27
Saturday 02 November 2024  18:55:54 -0400 (0:00:00.037)       0:02:25.145 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36
Saturday 02 November 2024  18:55:54 -0400 (0:00:00.109)       0:02:25.255 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41
Saturday 02 November 2024  18:55:54 -0400 (0:00:00.098)       0:02:25.353 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and not storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46
Saturday 02 November 2024  18:55:54 -0400 (0:00:00.078)       0:02:25.432 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level",
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:51
Saturday 02 November 2024  18:55:54 -0400 (0:00:00.104)       0:02:25.537 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:64
Saturday 02 November 2024  18:55:54 -0400 (0:00:00.125)       0:02:25.662 ***** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0
}

STDOUT:


** (process:185078): WARNING **: 18:55:55.050: failed to load module nvme: libbd_nvme.so.2: cannot open shared object file: No such file or directory
True



STDERR:

OpenSSH_8.7p1, OpenSSL 3.2.2 4 Jun 2024
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.42.118 originally 10.31.42.118
debug2: match not found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: configuration requests final Match pass
debug2: resolve_canonicalize: hostname 10.31.42.118 is address
debug1: re-parsing configuration
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.42.118 originally 10.31.42.118
debug2: match found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: auto-mux: Trying existing master
debug2: fd 3 setting O_NONBLOCK
debug2: mux_client_hello_exchange: master version 4
debug1: mux_client_request_session: master session id: 2
debug2: Received exit status from master 0
Shared connection to 10.31.42.118 closed.
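
The "True" in STDOUT above is the probe's answer; the nvme warning is blivet failing to load an optional plugin and does not affect the result. A sketch of how such a capability probe could be phrased, assuming a simple hasattr() check (the exact expression used by test-verify-pool-members.yml is not shown in this log):

    - name: Check that blivet supports PV grow to fill
      command: >-
        python3 -c 'import blivet.formats.lvmpv as lvmpv;
        print(hasattr(lvmpv.LVMPhysicalVolume, "grow_to_fill"))'
      changed_when: false
      register: storage_test_grow_support  # hypothetical register name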


TASK [Verify that PVs fill the whole devices when they should] *****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:73
Saturday 02 November 2024  18:55:55 -0400 (0:00:00.490)       0:02:26.153 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:83
Saturday 02 November 2024  18:55:55 -0400 (0:00:00.182)       0:02:26.335 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node2

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Saturday 02 November 2024  18:55:55 -0400 (0:00:00.202)       0:02:26.538 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Saturday 02 November 2024  18:55:55 -0400 (0:00:00.086)       0:02:26.625 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Saturday 02 November 2024  18:55:55 -0400 (0:00:00.086)       0:02:26.711 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Saturday 02 November 2024  18:55:55 -0400 (0:00:00.094)       0:02:26.806 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Saturday 02 November 2024  18:55:56 -0400 (0:00:00.122)       0:02:26.928 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Saturday 02 November 2024  18:55:56 -0400 (0:00:00.119)       0:02:27.047 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Saturday 02 November 2024  18:55:56 -0400 (0:00:00.086)       0:02:27.133 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55
Saturday 02 November 2024  18:55:56 -0400 (0:00:00.185)       0:02:27.318 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64
Saturday 02 November 2024  18:55:56 -0400 (0:00:00.063)       0:02:27.382 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74
Saturday 02 November 2024  18:55:56 -0400 (0:00:00.075)       0:02:27.458 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83
Saturday 02 November 2024  18:55:56 -0400 (0:00:00.081)       0:02:27.539 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}

TASK [Check LVM RAID] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:86
Saturday 02 November 2024  18:55:56 -0400 (0:00:00.100)       0:02:27.640 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node2

TASK [Validate pool member LVM RAID settings] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Saturday 02 November 2024  18:55:56 -0400 (0:00:00.170)       0:02:27.810 ***** 
skipping: [managed-node2] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=b062ee93-ef86-4262-a663-741cc15318f4'})  => {
    "ansible_loop_var": "storage_test_lvmraid_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_lvmraid_volume": {
        "_device": "/dev/stratis/foo/test1",
        "_mount_id": "UUID=b062ee93-ef86-4262-a663-741cc15318f4",
        "_raw_device": "/dev/stratis/foo/test1",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [
            "sda",
            "sdb",
            "sdc",
            "sdd",
            "sde",
            "sdf",
            "sdg",
            "sdh",
            "sdi"
        ],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "absent",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node2] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test2', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test2', '_raw_device': '/dev/stratis/foo/test2', '_mount_id': 'UUID=ba08c36b-5f1b-48b9-bb6b-53d91a63e5b9'})  => {
    "ansible_loop_var": "storage_test_lvmraid_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_lvmraid_volume": {
        "_device": "/dev/stratis/foo/test2",
        "_mount_id": "UUID=ba08c36b-5f1b-48b9-bb6b-53d91a63e5b9",
        "_raw_device": "/dev/stratis/foo/test2",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [
            "sda",
            "sdb",
            "sdc",
            "sdd",
            "sde",
            "sdf",
            "sdg",
            "sdh",
            "sdi"
        ],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test2",
        "mount_user": null,
        "name": "test2",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "absent",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped
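
The per-item skips above are what a looped, guarded task prints: the validation runs once per pool volume but is conditioned on the pool type. A sketch of that shape, where the loop_var name and the condition come from the output and the included file name is hypothetical:

    - name: Validate pool member LVM RAID settings
      include_tasks: verify-pool-member-lvmraid.yml  # hypothetical file name
      loop: "{{ storage_test_pool.volumes }}"  # assumed source of the items
      loop_control:
        loop_var: storage_test_lvmraid_volume
      when: storage_test_pool.type == 'lvm'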

TASK [Check Thin Pools] ********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:89
Saturday 02 November 2024  18:55:57 -0400 (0:00:00.138)       0:02:27.948 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node2

TASK [Validate pool member thinpool settings] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2
Saturday 02 November 2024  18:55:57 -0400 (0:00:00.222)       0:02:28.171 ***** 
skipping: [managed-node2] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=b062ee93-ef86-4262-a663-741cc15318f4'})  => {
    "ansible_loop_var": "storage_test_thin_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_thin_volume": {
        "_device": "/dev/stratis/foo/test1",
        "_mount_id": "UUID=b062ee93-ef86-4262-a663-741cc15318f4",
        "_raw_device": "/dev/stratis/foo/test1",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [
            "sda",
            "sdb",
            "sdc",
            "sdd",
            "sde",
            "sdf",
            "sdg",
            "sdh",
            "sdi"
        ],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "absent",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node2] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test2', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test2', '_raw_device': '/dev/stratis/foo/test2', '_mount_id': 'UUID=ba08c36b-5f1b-48b9-bb6b-53d91a63e5b9'})  => {
    "ansible_loop_var": "storage_test_thin_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_thin_volume": {
        "_device": "/dev/stratis/foo/test2",
        "_mount_id": "UUID=ba08c36b-5f1b-48b9-bb6b-53d91a63e5b9",
        "_raw_device": "/dev/stratis/foo/test2",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [
            "sda",
            "sdb",
            "sdc",
            "sdd",
            "sde",
            "sdf",
            "sdg",
            "sdh",
            "sdi"
        ],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test2",
        "mount_user": null,
        "name": "test2",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "absent",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check member encryption] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:92
Saturday 02 November 2024  18:55:57 -0400 (0:00:00.170)       0:02:28.341 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node2

TASK [Set test variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Saturday 02 November 2024  18:55:57 -0400 (0:00:00.256)       0:02:28.598 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Saturday 02 November 2024  18:55:57 -0400 (0:00:00.218)       0:02:28.817 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Saturday 02 November 2024  18:55:58 -0400 (0:00:00.240)       0:02:29.058 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Saturday 02 November 2024  18:55:58 -0400 (0:00:00.096)       0:02:29.154 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:95
Saturday 02 November 2024  18:55:58 -0400 (0:00:00.084)       0:02:29.239 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node2

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Saturday 02 November 2024  18:55:58 -0400 (0:00:00.180)       0:02:29.419 ***** 
skipping: [managed-node2] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=b062ee93-ef86-4262-a663-741cc15318f4'})  => {
    "ansible_loop_var": "storage_test_vdo_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_vdo_volume": {
        "_device": "/dev/stratis/foo/test1",
        "_mount_id": "UUID=b062ee93-ef86-4262-a663-741cc15318f4",
        "_raw_device": "/dev/stratis/foo/test1",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [
            "sda",
            "sdb",
            "sdc",
            "sdd",
            "sde",
            "sdf",
            "sdg",
            "sdh",
            "sdi"
        ],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "absent",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node2] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test2', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test2', '_raw_device': '/dev/stratis/foo/test2', '_mount_id': 'UUID=ba08c36b-5f1b-48b9-bb6b-53d91a63e5b9'})  => {
    "ansible_loop_var": "storage_test_vdo_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_vdo_volume": {
        "_device": "/dev/stratis/foo/test2",
        "_mount_id": "UUID=ba08c36b-5f1b-48b9-bb6b-53d91a63e5b9",
        "_raw_device": "/dev/stratis/foo/test2",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [
            "sda",
            "sdb",
            "sdc",
            "sdd",
            "sde",
            "sdf",
            "sdg",
            "sdh",
            "sdi"
        ],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test2",
        "mount_user": null,
        "name": "test2",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "absent",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:98
Saturday 02 November 2024  18:55:58 -0400 (0:00:00.095)       0:02:29.515 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node2

TASK [Run 'stratis report'] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Saturday 02 November 2024  18:55:58 -0400 (0:00:00.152)       0:02:29.667 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "stratis",
        "report"
    ],
    "delta": "0:00:00.339908",
    "end": "2024-11-02 18:55:59.547989",
    "rc": 0,
    "start": "2024-11-02 18:55:59.208081"
}

STDOUT:

{
    "name_to_pool_uuid_map": {},
    "partially_constructed_pools": [],
    "path_to_ids_map": {},
    "pools": [],
    "stopped_pools": []
}
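
The next task turns this stdout into a fact. A minimal sketch, assuming the command result was registered as storage_test_stratis_report (the name the reset task at the end of verify-pool-stratis.yml clears):

    - name: Get information about Stratis
      set_fact:
        _stratis_pool_info: "{{ storage_test_stratis_report.stdout | from_json }}"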

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:11
Saturday 02 November 2024  18:55:59 -0400 (0:00:00.896)       0:02:30.563 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_stratis_pool_info": {
            "name_to_pool_uuid_map": {},
            "partially_constructed_pools": [],
            "path_to_ids_map": {},
            "pools": [],
            "stopped_pools": []
        }
    },
    "changed": false
}

TASK [Verify that the pool was created] ****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Saturday 02 November 2024  18:55:59 -0400 (0:00:00.170)       0:02:30.734 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:25
Saturday 02 November 2024  18:55:59 -0400 (0:00:00.122)       0:02:30.857 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:34
Saturday 02 November 2024  18:56:00 -0400 (0:00:00.130)       0:02:30.987 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:44
Saturday 02 November 2024  18:56:00 -0400 (0:00:00.088)       0:02:31.076 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:101
Saturday 02 November 2024  18:56:00 -0400 (0:00:00.087)       0:02:31.163 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Saturday 02 November 2024  18:56:00 -0400 (0:00:00.088)       0:02:31.251 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=b062ee93-ef86-4262-a663-741cc15318f4'})
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test2', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test2', '_raw_device': '/dev/stratis/foo/test2', '_mount_id': 'UUID=ba08c36b-5f1b-48b9-bb6b-53d91a63e5b9'})

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Saturday 02 November 2024  18:56:00 -0400 (0:00:00.221)       0:02:31.473 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_volume_present": false,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Saturday 02 November 2024  18:56:00 -0400 (0:00:00.172)       0:02:31.645 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node2 => (item=mount)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node2 => (item=fstab)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node2 => (item=fs)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node2 => (item=device)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node2 => (item=encryption)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node2 => (item=md)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node2 => (item=size)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node2 => (item=cache)
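
The eight includes above correspond one-to-one to the _storage_volume_tests list set in the previous task. A sketch of the dispatch, assuming a plain include_tasks loop (the loop variable name comes from the task title):

    - name: "Run test verify for {{ storage_test_volume_subset }}"
      include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
      loop: "{{ _storage_volume_tests }}"
      loop_control:
        loop_var: storage_test_volume_subset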

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Saturday 02 November 2024  18:56:01 -0400 (0:00:00.365)       0:02:32.011 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/stratis/foo/test1"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Saturday 02 November 2024  18:56:01 -0400 (0:00:00.115)       0:02:32.126 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "/opt/test1",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Saturday 02 November 2024  18:56:01 -0400 (0:00:00.106)       0:02:32.233 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Saturday 02 November 2024  18:56:01 -0400 (0:00:00.065)       0:02:32.298 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Saturday 02 November 2024  18:56:01 -0400 (0:00:00.061)       0:02:32.360 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Saturday 02 November 2024  18:56:01 -0400 (0:00:00.062)       0:02:32.422 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Saturday 02 November 2024  18:56:01 -0400 (0:00:00.061)       0:02:32.483 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode",
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Saturday 02 November 2024  18:56:01 -0400 (0:00:00.063)       0:02:32.547 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Saturday 02 November 2024  18:56:01 -0400 (0:00:00.062)       0:02:32.609 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Saturday 02 November 2024  18:56:01 -0400 (0:00:00.139)       0:02:32.749 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Saturday 02 November 2024  18:56:01 -0400 (0:00:00.062)       0:02:32.811 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Saturday 02 November 2024  18:56:01 -0400 (0:00:00.066)       0:02:32.878 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "0",
        "storage_test_fstab_expected_mount_options_matches": "0",
        "storage_test_fstab_expected_mount_point_matches": "0",
        "storage_test_fstab_id_matches": [],
        "storage_test_fstab_mount_options_matches": [],
        "storage_test_fstab_mount_point_matches": []
    },
    "changed": false
}

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Saturday 02 November 2024  18:56:02 -0400 (0:00:00.124)       0:02:33.003 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Saturday 02 November 2024  18:56:02 -0400 (0:00:00.066)       0:02:33.070 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed
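
With the volume absent, the expected match count set a few tasks earlier is "0" and the match lists are empty, so the comparison holds. A minimal sketch of such an assertion, assuming it compares the expected count against the found matches:

    - name: Verify the fstab mount point
      assert:
        that:
          - storage_test_fstab_expected_mount_point_matches | int == storage_test_fstab_mount_point_matches | length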

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Saturday 02 November 2024  18:56:02 -0400 (0:00:00.072)       0:02:33.142 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__storage_verify_mount_options | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Saturday 02 November 2024  18:56:02 -0400 (0:00:00.079)       0:02:33.221 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:51
Saturday 02 November 2024  18:56:02 -0400 (0:00:00.070)       0:02:33.292 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Saturday 02 November 2024  18:56:02 -0400 (0:00:00.068)       0:02:33.360 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type != \"stratis\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Saturday 02 November 2024  18:56:02 -0400 (0:00:00.065)       0:02:33.425 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type != \"stratis\"",
    "skip_reason": "Conditional result was False"
}

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Saturday 02 November 2024  18:56:02 -0400 (0:00:00.067)       0:02:33.493 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}
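
"exists": false is the expected answer here, since the volume's state is "absent". A sketch of the probe, assuming it stats the volume's _device path:

    - name: See whether the device node is present
      stat:
        path: "{{ storage_test_volume._device }}"  # /dev/stratis/foo/test1 for the first item
      register: storage_test_dev  # hypothetical register name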

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Saturday 02 November 2024  18:56:03 -0400 (0:00:00.415)       0:02:33.909 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present or storage_test_volume.type == 'disk'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Saturday 02 November 2024  18:56:03 -0400 (0:00:00.061)       0:02:33.970 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Saturday 02 November 2024  18:56:03 -0400 (0:00:00.086)       0:02:34.057 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Saturday 02 November 2024  18:56:03 -0400 (0:00:00.066)       0:02:34.124 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "st_volume_type": "stratis"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Saturday 02 November 2024  18:56:03 -0400 (0:00:00.154)       0:02:34.278 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"raid\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Saturday 02 November 2024  18:56:03 -0400 (0:00:00.065)       0:02:34.343 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Saturday 02 November 2024  18:56:03 -0400 (0:00:00.065)       0:02:34.409 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Saturday 02 November 2024  18:56:03 -0400 (0:00:00.066)       0:02:34.476 ***** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: cryptsetup
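
"Nothing to do" means cryptsetup was already installed on the node. A minimal sketch of the ensure step, assuming the generic package module:

    - name: Ensure cryptsetup is present
      package:
        name: cryptsetup
        state: present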

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Saturday 02 November 2024  18:56:04 -0400 (0:00:01.321)       0:02:35.797 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Saturday 02 November 2024  18:56:04 -0400 (0:00:00.066)       0:02:35.864 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Saturday 02 November 2024  18:56:05 -0400 (0:00:00.068)       0:02:35.933 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Saturday 02 November 2024  18:56:05 -0400 (0:00:00.067)       0:02:36.001 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Saturday 02 November 2024  18:56:05 -0400 (0:00:00.098)       0:02:36.099 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Saturday 02 November 2024  18:56:05 -0400 (0:00:00.067)       0:02:36.167 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Saturday 02 November 2024  18:56:05 -0400 (0:00:00.067)       0:02:36.235 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Saturday 02 November 2024  18:56:05 -0400 (0:00:00.075)       0:02:36.311 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Saturday 02 November 2024  18:56:05 -0400 (0:00:00.104)       0:02:36.415 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Saturday 02 November 2024  18:56:05 -0400 (0:00:00.129)       0:02:36.545 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Saturday 02 November 2024  18:56:05 -0400 (0:00:00.119)       0:02:36.665 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Saturday 02 November 2024  18:56:05 -0400 (0:00:00.136)       0:02:36.801 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Saturday 02 November 2024  18:56:05 -0400 (0:00:00.067)       0:02:36.868 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Saturday 02 November 2024  18:56:06 -0400 (0:00:00.107)       0:02:36.976 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Saturday 02 November 2024  18:56:06 -0400 (0:00:00.103)       0:02:37.080 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Saturday 02 November 2024  18:56:06 -0400 (0:00:00.082)       0:02:37.162 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Saturday 02 November 2024  18:56:06 -0400 (0:00:00.084)       0:02:37.246 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Saturday 02 November 2024  18:56:06 -0400 (0:00:00.073)       0:02:37.320 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Saturday 02 November 2024  18:56:06 -0400 (0:00:00.050)       0:02:37.370 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Saturday 02 November 2024  18:56:06 -0400 (0:00:00.046)       0:02:37.417 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Saturday 02 November 2024  18:56:06 -0400 (0:00:00.049)       0:02:37.466 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Saturday 02 November 2024  18:56:06 -0400 (0:00:00.048)       0:02:37.515 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Saturday 02 November 2024  18:56:06 -0400 (0:00:00.062)       0:02:37.577 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Saturday 02 November 2024  18:56:06 -0400 (0:00:00.061)       0:02:37.639 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Saturday 02 November 2024  18:56:06 -0400 (0:00:00.059)       0:02:37.699 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Saturday 02 November 2024  18:56:06 -0400 (0:00:00.091)       0:02:37.791 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Saturday 02 November 2024  18:56:07 -0400 (0:00:00.172)       0:02:37.963 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Saturday 02 November 2024  18:56:07 -0400 (0:00:00.090)       0:02:38.054 ***** 
ok: [managed-node2] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}
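
The "VARIABLE IS NOT DEFINED!" line is not a failure: it is the marker ansible.builtin.debug prints when its var argument names an undefined variable, and storage_test_expected_size is undefined here because the tasks that would have set it were skipped for an absent volume. A standalone reproduction of the pattern:

- name: Show expected size
  ansible.builtin.debug:
    var: storage_test_expected_size  # undefined, so debug prints the
                                     # "VARIABLE IS NOT DEFINED!" marker instead of a value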

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Saturday 02 November 2024  18:56:07 -0400 (0:00:00.046)       0:02:38.100 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Saturday 02 November 2024  18:56:07 -0400 (0:00:00.078)       0:02:38.179 ***** 
skipping: [managed-node2] => {
    "false_condition": "_storage_test_volume_present | bool"
}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Saturday 02 November 2024  18:56:07 -0400 (0:00:00.075)       0:02:38.254 ***** 
skipping: [managed-node2] => {
    "false_condition": "_storage_test_volume_present | bool"
}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Saturday 02 November 2024  18:56:07 -0400 (0:00:00.056)       0:02:38.311 ***** 
skipping: [managed-node2] => {
    "false_condition": "_storage_test_volume_present | bool"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Saturday 02 November 2024  18:56:07 -0400 (0:00:00.057)       0:02:38.368 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Saturday 02 November 2024  18:56:07 -0400 (0:00:00.074)       0:02:38.443 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Saturday 02 November 2024  18:56:07 -0400 (0:00:00.044)       0:02:38.488 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Saturday 02 November 2024  18:56:07 -0400 (0:00:00.055)       0:02:38.544 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Saturday 02 November 2024  18:56:07 -0400 (0:00:00.065)       0:02:38.609 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Saturday 02 November 2024  18:56:07 -0400 (0:00:00.063)       0:02:38.673 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Saturday 02 November 2024  18:56:07 -0400 (0:00:00.065)       0:02:38.739 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Saturday 02 November 2024  18:56:07 -0400 (0:00:00.067)       0:02:38.806 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Saturday 02 November 2024  18:56:07 -0400 (0:00:00.078)       0:02:38.884 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Saturday 02 November 2024  18:56:08 -0400 (0:00:00.158)       0:02:39.043 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Saturday 02 November 2024  18:56:08 -0400 (0:00:00.067)       0:02:39.110 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Saturday 02 November 2024  18:56:08 -0400 (0:00:00.064)       0:02:39.174 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Saturday 02 November 2024  18:56:08 -0400 (0:00:00.062)       0:02:39.237 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Saturday 02 November 2024  18:56:08 -0400 (0:00:00.095)       0:02:39.332 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Saturday 02 November 2024  18:56:08 -0400 (0:00:00.087)       0:02:39.419 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Saturday 02 November 2024  18:56:08 -0400 (0:00:00.077)       0:02:39.497 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Saturday 02 November 2024  18:56:08 -0400 (0:00:00.091)       0:02:39.588 ***** 
ok: [managed-node2] => {
    "storage_test_actual_size": {
        "changed": false,
        "false_condition": "_storage_test_volume_present | bool",
        "skip_reason": "Conditional result was False",
        "skipped": true
    }
}
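
storage_test_actual_size prints as a skip record because the task that registered it was itself skipped; Ansible still stores the skip metadata ({"skipped": true, ...}) in the registered variable. A hypothetical reproduction of the pattern (the command and register name are illustrative, not the real test tasks):

- name: Parse the actual size of the volume
  ansible.builtin.command: lsblk -bno SIZE /dev/stratis/foo/test2
  register: storage_test_actual_size   # on skip, this holds the skip record
  when: _storage_test_volume_present | bool

- name: Show actual size
  ansible.builtin.debug:
    var: storage_test_actual_size      # prints the skip record seen above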

TASK [Show expected size] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Saturday 02 November 2024  18:56:08 -0400 (0:00:00.078)       0:02:39.667 ***** 
ok: [managed-node2] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Saturday 02 November 2024  18:56:08 -0400 (0:00:00.070)       0:02:39.737 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Saturday 02 November 2024  18:56:08 -0400 (0:00:00.091)       0:02:39.829 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Saturday 02 November 2024  18:56:08 -0400 (0:00:00.064)       0:02:39.893 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Saturday 02 November 2024  18:56:09 -0400 (0:00:00.062)       0:02:39.956 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Saturday 02 November 2024  18:56:09 -0400 (0:00:00.062)       0:02:40.019 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Saturday 02 November 2024  18:56:09 -0400 (0:00:00.061)       0:02:40.080 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Saturday 02 November 2024  18:56:09 -0400 (0:00:00.150)       0:02:40.231 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Saturday 02 November 2024  18:56:09 -0400 (0:00:00.122)       0:02:40.354 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Saturday 02 November 2024  18:56:09 -0400 (0:00:00.101)       0:02:40.455 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Saturday 02 November 2024  18:56:09 -0400 (0:00:00.126)       0:02:40.581 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_volume_present": false,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Saturday 02 November 2024  18:56:09 -0400 (0:00:00.127)       0:02:40.709 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node2 => (item=mount)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node2 => (item=fstab)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node2 => (item=fs)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node2 => (item=device)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node2 => (item=encryption)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node2 => (item=md)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node2 => (item=size)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node2 => (item=cache)
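
Each per-subset file is included once per entry in the _storage_volume_tests list set just above; the raw "{{ storage_test_volume_subset }}" in the task name indicates the loop variable is not templated at include time. A sketch of the include loop this output implies (an inference from the log, not a copy of test-verify-volume.yml):

- name: Run test verify for {{ storage_test_volume_subset }}
  ansible.builtin.include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
  loop: "{{ _storage_volume_tests }}"
  loop_control:
    loop_var: storage_test_volume_subset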

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Saturday 02 November 2024  18:56:10 -0400 (0:00:00.268)       0:02:40.978 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/stratis/foo/test2"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Saturday 02 November 2024  18:56:10 -0400 (0:00:00.080)       0:02:41.058 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "/opt/test2",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Saturday 02 November 2024  18:56:10 -0400 (0:00:00.107)       0:02:41.166 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Saturday 02 November 2024  18:56:10 -0400 (0:00:00.064)       0:02:41.230 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}
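
Every "skipping" record in this run follows the same mechanics: the task's when: clause evaluated false, and the callback echoes the failing clause back as false_condition. A hypothetical minimal example of the shape of these guarded assertions:

- name: Verify the current mount state by device
  ansible.builtin.assert:
    that:
      - storage_test_device_path in ansible_facts.mounts | map(attribute='device') | list
  when: _storage_test_volume_present   # false here, so the task is skipped and
                                       # this expression is reported as false_condition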

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Saturday 02 November 2024  18:56:10 -0400 (0:00:00.065)       0:02:41.296 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Saturday 02 November 2024  18:56:10 -0400 (0:00:00.080)       0:02:41.377 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Saturday 02 November 2024  18:56:10 -0400 (0:00:00.117)       0:02:41.494 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode",
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Saturday 02 November 2024  18:56:10 -0400 (0:00:00.202)       0:02:41.696 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Saturday 02 November 2024  18:56:10 -0400 (0:00:00.066)       0:02:41.763 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Saturday 02 November 2024  18:56:10 -0400 (0:00:00.069)       0:02:41.832 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Saturday 02 November 2024  18:56:11 -0400 (0:00:00.079)       0:02:41.912 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Saturday 02 November 2024  18:56:11 -0400 (0:00:00.100)       0:02:42.012 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "0",
        "storage_test_fstab_expected_mount_options_matches": "0",
        "storage_test_fstab_expected_mount_point_matches": "0",
        "storage_test_fstab_id_matches": [],
        "storage_test_fstab_mount_options_matches": [],
        "storage_test_fstab_mount_point_matches": []
    },
    "changed": false
}

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Saturday 02 November 2024  18:56:11 -0400 (0:00:00.187)       0:02:42.200 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Saturday 02 November 2024  18:56:11 -0400 (0:00:00.089)       0:02:42.289 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Saturday 02 November 2024  18:56:11 -0400 (0:00:00.066)       0:02:42.356 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__storage_verify_mount_options | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Saturday 02 November 2024  18:56:11 -0400 (0:00:00.062)       0:02:42.419 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:51
Saturday 02 November 2024  18:56:11 -0400 (0:00:00.042)       0:02:42.461 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Saturday 02 November 2024  18:56:11 -0400 (0:00:00.065)       0:02:42.526 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type != \"stratis\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Saturday 02 November 2024  18:56:11 -0400 (0:00:00.067)       0:02:42.593 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type != \"stratis\"",
    "skip_reason": "Conditional result was False"
}

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Saturday 02 November 2024  18:56:11 -0400 (0:00:00.064)       0:02:42.658 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}
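
The stat result feeds the presence/absence assertion that follows: with the volume expected absent, exists: false is the passing case, which is why the next task skips and the one after it passes. A sketch under that assumption, collapsing the two guarded asserts in the log into one equivalent check (the path and register name are illustrative):

- name: See whether the device node is present
  ansible.builtin.stat:
    path: /dev/stratis/foo/test2
  register: storage_test_dev

- name: Verify the presence/absence of the device node
  ansible.builtin.assert:
    that:
      - storage_test_dev.stat.exists == (_storage_test_volume_present | bool)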

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Saturday 02 November 2024  18:56:12 -0400 (0:00:00.454)       0:02:43.113 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present or storage_test_volume.type == 'disk'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Saturday 02 November 2024  18:56:12 -0400 (0:00:00.069)       0:02:43.183 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Saturday 02 November 2024  18:56:12 -0400 (0:00:00.340)       0:02:43.524 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Saturday 02 November 2024  18:56:12 -0400 (0:00:00.110)       0:02:43.634 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "st_volume_type": "stratis"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Saturday 02 November 2024  18:56:12 -0400 (0:00:00.090)       0:02:43.724 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"raid\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Saturday 02 November 2024  18:56:12 -0400 (0:00:00.095)       0:02:43.819 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Saturday 02 November 2024  18:56:12 -0400 (0:00:00.072)       0:02:43.892 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Saturday 02 November 2024  18:56:13 -0400 (0:00:00.046)       0:02:43.939 ***** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: cryptsetup

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Saturday 02 November 2024  18:56:14 -0400 (0:00:01.300)       0:02:45.239 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Saturday 02 November 2024  18:56:14 -0400 (0:00:00.068)       0:02:45.307 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Saturday 02 November 2024  18:56:14 -0400 (0:00:00.065)       0:02:45.373 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Saturday 02 November 2024  18:56:14 -0400 (0:00:00.062)       0:02:45.435 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Saturday 02 November 2024  18:56:14 -0400 (0:00:00.066)       0:02:45.502 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Saturday 02 November 2024  18:56:14 -0400 (0:00:00.087)       0:02:45.589 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Saturday 02 November 2024  18:56:14 -0400 (0:00:00.118)       0:02:45.708 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Saturday 02 November 2024  18:56:14 -0400 (0:00:00.109)       0:02:45.817 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Saturday 02 November 2024  18:56:15 -0400 (0:00:00.094)       0:02:45.911 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Saturday 02 November 2024  18:56:15 -0400 (0:00:00.264)       0:02:46.176 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Saturday 02 November 2024  18:56:15 -0400 (0:00:00.119)       0:02:46.296 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Saturday 02 November 2024  18:56:15 -0400 (0:00:00.110)       0:02:46.407 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Saturday 02 November 2024  18:56:15 -0400 (0:00:00.093)       0:02:46.500 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Saturday 02 November 2024  18:56:15 -0400 (0:00:00.077)       0:02:46.577 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Saturday 02 November 2024  18:56:15 -0400 (0:00:00.057)       0:02:46.634 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Saturday 02 November 2024  18:56:15 -0400 (0:00:00.038)       0:02:46.673 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Saturday 02 November 2024  18:56:15 -0400 (0:00:00.044)       0:02:46.718 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Saturday 02 November 2024  18:56:15 -0400 (0:00:00.074)       0:02:46.793 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Saturday 02 November 2024  18:56:15 -0400 (0:00:00.084)       0:02:46.877 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Saturday 02 November 2024  18:56:16 -0400 (0:00:00.062)       0:02:46.939 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Saturday 02 November 2024  18:56:16 -0400 (0:00:00.062)       0:02:47.001 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Saturday 02 November 2024  18:56:16 -0400 (0:00:00.095)       0:02:47.097 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Saturday 02 November 2024  18:56:16 -0400 (0:00:00.083)       0:02:47.181 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Saturday 02 November 2024  18:56:16 -0400 (0:00:00.119)       0:02:47.300 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Saturday 02 November 2024  18:56:16 -0400 (0:00:00.297)       0:02:47.597 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Saturday 02 November 2024  18:56:16 -0400 (0:00:00.134)       0:02:47.732 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Saturday 02 November 2024  18:56:16 -0400 (0:00:00.097)       0:02:47.829 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Saturday 02 November 2024  18:56:17 -0400 (0:00:00.098)       0:02:47.929 ***** 
ok: [managed-node2] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Saturday 02 November 2024  18:56:17 -0400 (0:00:00.073)       0:02:48.002 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Saturday 02 November 2024  18:56:17 -0400 (0:00:00.096)       0:02:48.098 ***** 
skipping: [managed-node2] => {
    "false_condition": "_storage_test_volume_present | bool"
}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Saturday 02 November 2024  18:56:17 -0400 (0:00:00.093)       0:02:48.192 ***** 
skipping: [managed-node2] => {
    "false_condition": "_storage_test_volume_present | bool"
}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Saturday 02 November 2024  18:56:17 -0400 (0:00:00.099)       0:02:48.291 ***** 
skipping: [managed-node2] => {
    "false_condition": "_storage_test_volume_present | bool"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Saturday 02 November 2024  18:56:17 -0400 (0:00:00.094)       0:02:48.386 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Saturday 02 November 2024  18:56:17 -0400 (0:00:00.094)       0:02:48.480 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Saturday 02 November 2024  18:56:17 -0400 (0:00:00.061)       0:02:48.541 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Saturday 02 November 2024  18:56:17 -0400 (0:00:00.064)       0:02:48.605 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Saturday 02 November 2024  18:56:17 -0400 (0:00:00.064)       0:02:48.670 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Saturday 02 November 2024  18:56:17 -0400 (0:00:00.072)       0:02:48.742 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Saturday 02 November 2024  18:56:17 -0400 (0:00:00.094)       0:02:48.836 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Saturday 02 November 2024  18:56:18 -0400 (0:00:00.254)       0:02:49.091 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Saturday 02 November 2024  18:56:18 -0400 (0:00:00.121)       0:02:49.213 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Saturday 02 November 2024  18:56:18 -0400 (0:00:00.133)       0:02:49.346 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Saturday 02 November 2024  18:56:18 -0400 (0:00:00.095)       0:02:49.442 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Saturday 02 November 2024  18:56:18 -0400 (0:00:00.070)       0:02:49.512 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Saturday 02 November 2024  18:56:18 -0400 (0:00:00.068)       0:02:49.581 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Saturday 02 November 2024  18:56:18 -0400 (0:00:00.067)       0:02:49.648 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Saturday 02 November 2024  18:56:18 -0400 (0:00:00.067)       0:02:49.716 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Saturday 02 November 2024  18:56:18 -0400 (0:00:00.066)       0:02:49.782 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Saturday 02 November 2024  18:56:18 -0400 (0:00:00.087)       0:02:49.870 ***** 
ok: [managed-node2] => {
    "storage_test_actual_size": {
        "changed": false,
        "false_condition": "_storage_test_volume_present | bool",
        "skip_reason": "Conditional result was False",
        "skipped": true
    }
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Saturday 02 November 2024  18:56:19 -0400 (0:00:00.094)       0:02:49.965 ***** 
ok: [managed-node2] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Saturday 02 November 2024  18:56:19 -0400 (0:00:00.102)       0:02:50.067 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}
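
Every task in the thin-pool branch above is skipped because storage_test_volume.thin is not set for this Stratis volume, and the final assertion is skipped because _storage_test_volume_present is false at this point in the test. When the chain does run, it reduces to a single comparison; a minimal sketch, with variable names taken from the output above and the result structure assumed:

    - name: Assert expected size is actual size
      ansible.builtin.assert:
        that:
          # structure assumed: actual size exposed as a .bytes field
          - storage_test_expected_size | int == storage_test_actual_size.bytes | int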

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Saturday 02 November 2024  18:56:19 -0400 (0:00:00.150)       0:02:50.218 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Saturday 02 November 2024  18:56:19 -0400 (0:00:00.113)       0:02:50.332 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Saturday 02 November 2024  18:56:19 -0400 (0:00:00.068)       0:02:50.400 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Saturday 02 November 2024  18:56:19 -0400 (0:00:00.179)       0:02:50.579 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Saturday 02 November 2024  18:56:19 -0400 (0:00:00.065)       0:02:50.644 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Saturday 02 November 2024  18:56:19 -0400 (0:00:00.064)       0:02:50.709 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Saturday 02 November 2024  18:56:19 -0400 (0:00:00.061)       0:02:50.770 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Saturday 02 November 2024  18:56:19 -0400 (0:00:00.063)       0:02:50.834 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Saturday 02 November 2024  18:56:20 -0400 (0:00:00.088)       0:02:50.922 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Saturday 02 November 2024  18:56:20 -0400 (0:00:00.059)       0:02:50.982 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Create encrypted Stratis pool] *******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:136
Saturday 02 November 2024  18:56:20 -0400 (0:00:00.064)       0:02:51.046 ***** 
included: fedora.linux_system_roles.storage for managed-node2

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Saturday 02 November 2024  18:56:20 -0400 (0:00:00.125)       0:02:51.172 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Saturday 02 November 2024  18:56:20 -0400 (0:00:00.113)       0:02:51.285 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Saturday 02 November 2024  18:56:20 -0400 (0:00:00.118)       0:02:51.403 ***** 
skipping: [managed-node2] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node2] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}


TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Saturday 02 November 2024  18:56:20 -0400 (0:00:00.174)       0:02:51.578 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Saturday 02 November 2024  18:56:20 -0400 (0:00:00.178)       0:02:51.756 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Saturday 02 November 2024  18:56:20 -0400 (0:00:00.078)       0:02:51.834 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Saturday 02 November 2024  18:56:21 -0400 (0:00:00.174)       0:02:52.009 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Saturday 02 November 2024  18:56:21 -0400 (0:00:00.124)       0:02:52.134 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Saturday 02 November 2024  18:56:21 -0400 (0:00:00.182)       0:02:52.317 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Saturday 02 November 2024  18:56:21 -0400 (0:00:00.079)       0:02:52.396 ***** 
ok: [managed-node2] => {
    "storage_pools": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "encryption": true,
            "encryption_password": "yabbadabbadoo",
            "name": "foo",
            "type": "stratis",
            "volumes": [
                {
                    "mount_point": "/opt/test1",
                    "name": "test1",
                    "size": "4g"
                }
            ]
        }
    ]
}
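
For orientation, a minimal playbook sketch that would produce the storage_pools value shown above (invocation style assumed; the actual tests_stratis.yml may wire this up differently):

    - hosts: managed-node2
      tasks:
        - name: Create encrypted Stratis pool
          ansible.builtin.include_role:
            name: fedora.linux_system_roles.storage
          vars:
            storage_pools:
              - name: foo
                type: stratis
                disks: [sda, sdb, sdc, sdd, sde, sdf, sdg, sdh, sdi]
                encryption: true
                encryption_password: yabbadabbadoo
                volumes:
                  - name: test1
                    size: 4g
                    mount_point: /opt/test1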

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Saturday 02 November 2024  18:56:21 -0400 (0:00:00.088)       0:02:52.485 ***** 
ok: [managed-node2] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Saturday 02 November 2024  18:56:21 -0400 (0:00:00.084)       0:02:52.569 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Saturday 02 November 2024  18:56:21 -0400 (0:00:00.078)       0:02:52.648 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Saturday 02 November 2024  18:56:21 -0400 (0:00:00.081)       0:02:52.730 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Saturday 02 November 2024  18:56:21 -0400 (0:00:00.080)       0:02:52.810 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Saturday 02 November 2024  18:56:21 -0400 (0:00:00.079)       0:02:52.889 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Saturday 02 November 2024  18:56:22 -0400 (0:00:00.165)       0:02:53.055 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Saturday 02 November 2024  18:56:22 -0400 (0:00:00.061)       0:02:53.116 ***** 
changed: [managed-node2] => {
    "actions": [
        {
            "action": "create format",
            "device": "/dev/sdi",
            "fs_type": "stratis"
        },
        {
            "action": "create format",
            "device": "/dev/sdh",
            "fs_type": "stratis"
        },
        {
            "action": "create format",
            "device": "/dev/sdg",
            "fs_type": "stratis"
        },
        {
            "action": "create format",
            "device": "/dev/sdf",
            "fs_type": "stratis"
        },
        {
            "action": "create format",
            "device": "/dev/sde",
            "fs_type": "stratis"
        },
        {
            "action": "create format",
            "device": "/dev/sdd",
            "fs_type": "stratis"
        },
        {
            "action": "create format",
            "device": "/dev/sdc",
            "fs_type": "stratis"
        },
        {
            "action": "create format",
            "device": "/dev/sdb",
            "fs_type": "stratis"
        },
        {
            "action": "create format",
            "device": "/dev/sda",
            "fs_type": "stratis"
        },
        {
            "action": "create device",
            "device": "/dev/stratis/foo",
            "fs_type": null
        },
        {
            "action": "create device",
            "device": "/dev/stratis/foo/test1",
            "fs_type": null
        },
        {
            "action": "create format",
            "device": "/dev/stratis/foo/test1",
            "fs_type": "stratis xfs"
        }
    ],
    "changed": true,
    "crypts": [],
    "leaves": [
        "/dev/xvda1",
        "/dev/stratis/foo/test1"
    ],
    "mounts": [
        {
            "dump": 0,
            "fstype": "xfs",
            "group": null,
            "mode": null,
            "opts": "defaults",
            "owner": null,
            "passno": 0,
            "path": "/opt/test1",
            "src": "UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0",
            "state": "mounted"
        }
    ],
    "packages": [
        "stratisd",
        "stratis-cli",
        "xfsprogs"
    ],
    "pools": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "encryption": true,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "stratis",
            "volumes": [
                {
                    "_device": "/dev/stratis/foo/test1",
                    "_kernel_device": "/dev/dm-14",
                    "_mount_id": "UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0",
                    "_raw_device": "/dev/stratis/foo/test1",
                    "_raw_kernel_device": "/dev/dm-14",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "stratis",
                    "vdo_pool_size": null
                }
            ]
        }
    ],
    "volumes": []
}
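
The "actions" list records blivet's ordering: each member disk first receives a Stratis format (encrypted here, matching the crypto_LUKS signatures visible in the device scan further below), then the pool device /dev/stratis/foo is created, and finally the test1 filesystem is created and formatted. A hedged spot-check outside the role, using the stratis-cli that the role lists under "packages" (subcommand names per stratis-cli; verify against the installed version):

    - name: List Stratis pools
      ansible.builtin.command: stratis pool list
      register: stratis_pools
      changed_when: false

    - name: List filesystems in pool foo
      ansible.builtin.command: stratis filesystem list foo
      register: stratis_filesystems
      changed_when: false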

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Saturday 02 November 2024  18:58:02 -0400 (0:01:40.279)       0:04:33.396 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Saturday 02 November 2024  18:58:02 -0400 (0:00:00.069)       0:04:33.465 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730588148.2251394,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "2b06b132c9b1f74ec4dca585656a9f294c78ba1c",
        "ctime": 1730588147.5641322,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 415236291,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1730588147.5641322,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1366,
        "uid": 0,
        "version": "3857707169",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Saturday 02 November 2024  18:58:03 -0400 (0:00:00.438)       0:04:33.904 ***** 
ok: [managed-node2] => {
    "backup": "",
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Saturday 02 November 2024  18:58:03 -0400 (0:00:00.515)       0:04:34.419 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Saturday 02 November 2024  18:58:03 -0400 (0:00:00.063)       0:04:34.482 ***** 
ok: [managed-node2] => {
    "blivet_output": {
        "actions": [
            {
                "action": "create format",
                "device": "/dev/sdi",
                "fs_type": "stratis"
            },
            {
                "action": "create format",
                "device": "/dev/sdh",
                "fs_type": "stratis"
            },
            {
                "action": "create format",
                "device": "/dev/sdg",
                "fs_type": "stratis"
            },
            {
                "action": "create format",
                "device": "/dev/sdf",
                "fs_type": "stratis"
            },
            {
                "action": "create format",
                "device": "/dev/sde",
                "fs_type": "stratis"
            },
            {
                "action": "create format",
                "device": "/dev/sdd",
                "fs_type": "stratis"
            },
            {
                "action": "create format",
                "device": "/dev/sdc",
                "fs_type": "stratis"
            },
            {
                "action": "create format",
                "device": "/dev/sdb",
                "fs_type": "stratis"
            },
            {
                "action": "create format",
                "device": "/dev/sda",
                "fs_type": "stratis"
            },
            {
                "action": "create device",
                "device": "/dev/stratis/foo",
                "fs_type": null
            },
            {
                "action": "create device",
                "device": "/dev/stratis/foo/test1",
                "fs_type": null
            },
            {
                "action": "create format",
                "device": "/dev/stratis/foo/test1",
                "fs_type": "stratis xfs"
            }
        ],
        "changed": true,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/xvda1",
            "/dev/stratis/foo/test1"
        ],
        "mounts": [
            {
                "dump": 0,
                "fstype": "xfs",
                "group": null,
                "mode": null,
                "opts": "defaults",
                "owner": null,
                "passno": 0,
                "path": "/opt/test1",
                "src": "UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0",
                "state": "mounted"
            }
        ],
        "packages": [
            "stratisd",
            "stratis-cli",
            "xfsprogs"
        ],
        "pools": [
            {
                "disks": [
                    "sda",
                    "sdb",
                    "sdc",
                    "sdd",
                    "sde",
                    "sdf",
                    "sdg",
                    "sdh",
                    "sdi"
                ],
                "encryption": true,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "stratis",
                "volumes": [
                    {
                        "_device": "/dev/stratis/foo/test1",
                        "_kernel_device": "/dev/dm-14",
                        "_mount_id": "UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0",
                        "_raw_device": "/dev/stratis/foo/test1",
                        "_raw_kernel_device": "/dev/dm-14",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "stratis",
                        "vdo_pool_size": null
                    }
                ]
            }
        ],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Saturday 02 November 2024  18:58:03 -0400 (0:00:00.115)       0:04:34.598 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda",
                    "sdb",
                    "sdc",
                    "sdd",
                    "sde",
                    "sdf",
                    "sdg",
                    "sdh",
                    "sdi"
                ],
                "encryption": true,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "stratis",
                "volumes": [
                    {
                        "_device": "/dev/stratis/foo/test1",
                        "_kernel_device": "/dev/dm-14",
                        "_mount_id": "UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0",
                        "_raw_device": "/dev/stratis/foo/test1",
                        "_raw_kernel_device": "/dev/dm-14",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "stratis",
                        "vdo_pool_size": null
                    }
                ]
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Saturday 02 November 2024  18:58:03 -0400 (0:00:00.088)       0:04:34.686 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Saturday 02 November 2024  18:58:03 -0400 (0:00:00.113)       0:04:34.800 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Saturday 02 November 2024  18:58:04 -0400 (0:00:00.153)       0:04:34.953 ***** 
ok: [managed-node2] => {
    "changed": false,
    "name": null,
    "status": {}
}
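
Because the role has just rewritten /etc/fstab, systemd's generated mount units are stale until a daemon-reload; the "name": null and empty "status" output above is consistent with a bare daemon_reload call. A minimal sketch (module choice assumed):

    - name: Tell systemd to refresh its view of /etc/fstab
      ansible.builtin.systemd:
        daemon_reload: true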

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Saturday 02 November 2024  18:58:04 -0400 (0:00:00.899)       0:04:35.853 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
changed: [managed-node2] => (item={'src': 'UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "xfs",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0",
        "state": "mounted"
    },
    "name": "/opt/test1",
    "opts": "defaults",
    "passno": "0",
    "src": "UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0"
}
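
The mount loop feeds each entry of blivet_output's "mounts" list into the mount module (the redirect line above shows ansible.builtin.mount resolving to ansible.posix.mount). A sketch of the equivalent standalone task, with values taken from the loop item shown:

    - name: Mount the new Stratis filesystem
      ansible.posix.mount:
        src: UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0
        path: /opt/test1
        fstype: xfs
        opts: defaults
        state: mounted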

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Saturday 02 November 2024  18:58:05 -0400 (0:00:00.613)       0:04:36.467 ***** 
skipping: [managed-node2] => (item={'src': 'UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None})  => {
    "ansible_loop_var": "mount_info",
    "changed": false,
    "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0",
        "state": "mounted"
    },
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Saturday 02 November 2024  18:58:05 -0400 (0:00:00.215)       0:04:36.682 ***** 
ok: [managed-node2] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Saturday 02 November 2024  18:58:06 -0400 (0:00:00.971)       0:04:37.653 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730587881.7952998,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1730587879.2202723,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 104857860,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1730587879.2212098,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "3381617552",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Saturday 02 November 2024  18:58:07 -0400 (0:00:00.524)       0:04:38.177 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Saturday 02 November 2024  18:58:07 -0400 (0:00:00.071)       0:04:38.249 ***** 
ok: [managed-node2]

TASK [Verify role results] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:151
Saturday 02 November 2024  18:58:08 -0400 (0:00:01.261)       0:04:39.510 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node2

TASK [Print out pool information] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Saturday 02 November 2024  18:58:08 -0400 (0:00:00.132)       0:04:39.643 ***** 
ok: [managed-node2] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "encryption": true,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "stratis",
            "volumes": [
                {
                    "_device": "/dev/stratis/foo/test1",
                    "_kernel_device": "/dev/dm-14",
                    "_mount_id": "UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0",
                    "_raw_device": "/dev/stratis/foo/test1",
                    "_raw_kernel_device": "/dev/dm-14",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "stratis",
                    "vdo_pool_size": null
                }
            ]
        }
    ]
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Saturday 02 November 2024  18:58:08 -0400 (0:00:00.114)       0:04:39.758 ***** 
skipping: [managed-node2] => {
    "false_condition": "_storage_volumes_list | length > 0"
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Saturday 02 November 2024  18:58:08 -0400 (0:00:00.103)       0:04:39.862 ***** 
ok: [managed-node2] => {
    "changed": false,
    "info": {
        "/dev/mapper/stratis-1-private-12c040f99ef548a687c0626aa570ad34-crypt": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-12c040f99ef548a687c0626aa570ad34-crypt",
            "size": "1024G",
            "type": "crypt",
            "uuid": "12c040f9-9ef5-48a6-87c0-626aa570ad34"
        },
        "/dev/mapper/stratis-1-private-19e44d27b95549c088a56aa7e375e9ad-crypt": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-19e44d27b95549c088a56aa7e375e9ad-crypt",
            "size": "10G",
            "type": "crypt",
            "uuid": "19e44d27-b955-49c0-88a5-6aa7e375e9ad"
        },
        "/dev/mapper/stratis-1-private-2c7cd33c4d044c9d95ed699efbde2b67-crypt": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-2c7cd33c4d044c9d95ed699efbde2b67-crypt",
            "size": "10G",
            "type": "crypt",
            "uuid": "2c7cd33c-4d04-4c9d-95ed-699efbde2b67"
        },
        "/dev/mapper/stratis-1-private-38e8e1a24c8343df9695accb23200b24-crypt": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-38e8e1a24c8343df9695accb23200b24-crypt",
            "size": "1024G",
            "type": "crypt",
            "uuid": "38e8e1a2-4c83-43df-9695-accb23200b24"
        },
        "/dev/mapper/stratis-1-private-720d2792bc9c426e9edb792054c8825f-crypt": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-720d2792bc9c426e9edb792054c8825f-crypt",
            "size": "1024G",
            "type": "crypt",
            "uuid": "720d2792-bc9c-426e-9edb-792054c8825f"
        },
        "/dev/mapper/stratis-1-private-74ecc8862c824e77abdafaebaa43c474-crypt": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-74ecc8862c824e77abdafaebaa43c474-crypt",
            "size": "10G",
            "type": "crypt",
            "uuid": "74ecc886-2c82-4e77-abda-faebaa43c474"
        },
        "/dev/mapper/stratis-1-private-85599f9f945f4d34ab2c646d3a692085-crypt": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-85599f9f945f4d34ab2c646d3a692085-crypt",
            "size": "10G",
            "type": "crypt",
            "uuid": "85599f9f-945f-4d34-ab2c-646d3a692085"
        },
        "/dev/mapper/stratis-1-private-af4d9e1f229542be857558097d9869c6-flex-mdv": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-af4d9e1f229542be857558097d9869c6-flex-mdv",
            "size": "512M",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-af4d9e1f229542be857558097d9869c6-flex-thindata": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-af4d9e1f229542be857558097d9869c6-flex-thindata",
            "size": "50G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-af4d9e1f229542be857558097d9869c6-flex-thinmeta": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-af4d9e1f229542be857558097d9869c6-flex-thinmeta",
            "size": "799M",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-af4d9e1f229542be857558097d9869c6-physical-originsub": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-af4d9e1f229542be857558097d9869c6-physical-originsub",
            "size": "52.1G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-af4d9e1f229542be857558097d9869c6-thinpool-pool": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-af4d9e1f229542be857558097d9869c6-thinpool-pool",
            "size": "50G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-c87682aff4f64f618ca2579c183a9c17-crypt": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-c87682aff4f64f618ca2579c183a9c17-crypt",
            "size": "10G",
            "type": "crypt",
            "uuid": "c87682af-f4f6-4f61-8ca2-579c183a9c17"
        },
        "/dev/mapper/stratis-1-private-daccb6fb36d546a3ba9912622586c190-crypt": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-daccb6fb36d546a3ba9912622586c190-crypt",
            "size": "10G",
            "type": "crypt",
            "uuid": "daccb6fb-36d5-46a3-ba99-12622586c190"
        },
        "/dev/sda": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": "8f2afe5e-5bc4-42a2-9c53-a75306158917"
        },
        "/dev/sdb": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": "091e64cf-d60b-4010-8a07-fba82fd55bf8"
        },
        "/dev/sdc": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": "7582367e-c2d7-4a18-bb1a-da4603daf18e"
        },
        "/dev/sdd": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": "e693543d-fd71-4c92-909c-18811f09ecc5"
        },
        "/dev/sde": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": "7a724e5b-21cd-484f-bbd0-d018b0435369"
        },
        "/dev/sdf": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": "d14cc918-a16c-4f82-86ed-7c95de8c10a8"
        },
        "/dev/sdg": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": "855a3859-de23-493c-9cc7-8f655714fc20"
        },
        "/dev/sdh": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": "76d67332-3bc1-4daf-815c-8dd505e1ad0b"
        },
        "/dev/sdi": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": "38b23489-c4f0-4785-928a-3c60d8180a86"
        },
        "/dev/stratis/foo/test1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/opt/test1",
            "name": "/dev/stratis/foo/test1",
            "size": "4G",
            "type": "stratis",
            "uuid": "cc0ea903-7499-49e5-9db5-ea7601b8b1c0"
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda1",
            "size": "250G",
            "type": "partition",
            "uuid": "d6f52a16-d77d-4c0d-9841-76b856654401"
        }
    }
}
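
The per-device map above (name, fstype, label, mountpoint, size, type, uuid) comes from a helper module bundled with the tests; an ad-hoc approximation with lsblk (illustration only, not the test's actual implementation) would be:

    - name: Collect block device info
      ansible.builtin.command: lsblk -p --json -o NAME,FSTYPE,LABEL,MOUNTPOINT,SIZE,TYPE,UUID
      register: blk_info
      changed_when: false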

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Saturday 02 November 2024  18:58:09 -0400 (0:00:00.511)       0:04:40.373 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.002957",
    "end": "2024-11-02 18:58:09.846904",
    "rc": 0,
    "start": "2024-11-02 18:58:09.843947"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Wed Oct 30 09:04:39 2024
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=d6f52a16-d77d-4c0d-9841-76b856654401 /                       xfs     defaults        0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0 /opt/test1 xfs defaults 0 0
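
Note: two things are worth reading off this fstab. The role tags its managed edits with the "# system_role:storage" marker, and the new volume is referenced by filesystem UUID rather than device path, so the mount survives device renumbering. The fstab checks later in the log reduce to an assertion of roughly this shape (task and register names are illustrative, not the test's exact code):

    - name: Read the /etc/fstab file for volume existence (shape of the task above)
      ansible.builtin.command: cat /etc/fstab
      register: storage_test_fstab
      changed_when: false

    - name: Assert the volume's entry is present (sketch)
      ansible.builtin.assert:
        that:
          - storage_test_fstab.stdout is search('UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0 /opt/test1 xfs defaults')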

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Saturday 02 November 2024  18:58:09 -0400 (0:00:00.499)       0:04:40.873 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.002852",
    "end": "2024-11-02 18:58:10.334331",
    "failed_when_result": false,
    "rc": 0,
    "start": "2024-11-02 18:58:10.331479"
}
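
Note: the "failed_when_result": false key shows this task carries an overridden failure condition. Reading /etc/crypttab must not fail the play even when the file is empty or missing, since its contents are only inspected by later assertions. The pattern, sketched (names are illustrative):

    - name: Read the /etc/crypttab file (pattern in use above)
      ansible.builtin.command: cat /etc/crypttab
      register: storage_test_crypttab
      failed_when: false
      changed_when: false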

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Saturday 02 November 2024  18:58:10 -0400 (0:00:00.480)       0:04:41.354 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node2 => (item={'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'encryption': True, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER', 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'stratis', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0', '_kernel_device': '/dev/dm-14', '_raw_kernel_device': '/dev/dm-14'}]})
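
Note: the loop item above is the fully resolved pool spec, with every default filled in. The test input that produces it is far smaller; a plausible reconstruction from the values visible in the dump (the encryption password is masked by no_log, so the variable name below is hypothetical):

    storage_pools:
      - name: foo
        type: stratis
        disks: [sda, sdb, sdc, sdd, sde, sdf, sdg, sdh, sdi]
        encryption: true
        encryption_password: "{{ storage_test_pool_passphrase }}"  # hypothetical variable
        volumes:
          - name: test1
            size: 4g
            fs_type: xfs
            mount_point: /opt/test1
            mount_options: defaults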

TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Saturday 02 November 2024  18:58:10 -0400 (0:00:00.247)       0:04:41.602 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Saturday 02 November 2024  18:58:10 -0400 (0:00:00.146)       0:04:41.748 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Saturday 02 November 2024  18:58:10 -0400 (0:00:00.150)       0:04:41.898 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify pool subset] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34
Saturday 02 November 2024  18:58:11 -0400 (0:00:00.215)       0:04:42.114 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node2 => (item=members)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node2 => (item=volumes)

TASK [Set test variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2
Saturday 02 November 2024  18:58:11 -0400 (0:00:00.192)       0:04:42.307 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Get the canonical device path for each member device] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8
Saturday 02 November 2024  18:58:11 -0400 (0:00:00.065)       0:04:42.373 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Set pvs lvm length] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17
Saturday 02 November 2024  18:58:11 -0400 (0:00:00.043)       0:04:42.416 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set pool pvs] ************************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22
Saturday 02 November 2024  18:58:11 -0400 (0:00:00.045)       0:04:42.462 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify PV count] *********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27
Saturday 02 November 2024  18:58:11 -0400 (0:00:00.047)       0:04:42.509 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36
Saturday 02 November 2024  18:58:11 -0400 (0:00:00.055)       0:04:42.565 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41
Saturday 02 November 2024  18:58:11 -0400 (0:00:00.077)       0:04:42.642 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and not storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46
Saturday 02 November 2024  18:58:11 -0400 (0:00:00.099)       0:04:42.741 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level",
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:51
Saturday 02 November 2024  18:58:11 -0400 (0:00:00.093)       0:04:42.834 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:64
Saturday 02 November 2024  18:58:12 -0400 (0:00:00.093)       0:04:42.928 ***** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0
}

STDOUT:


** (process:197687): WARNING **: 18:58:12.320: failed to load module nvme: libbd_nvme.so.2: cannot open shared object file: No such file or directory
True



STDERR:

OpenSSH_8.7p1, OpenSSL 3.2.2 4 Jun 2024
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.42.118 originally 10.31.42.118
debug2: match not found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: configuration requests final Match pass
debug2: resolve_canonicalize: hostname 10.31.42.118 is address
debug1: re-parsing configuration
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.42.118 originally 10.31.42.118
debug2: match found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: auto-mux: Trying existing master
debug2: fd 3 setting O_NONBLOCK
debug2: mux_client_hello_exchange: master version 4
debug1: mux_client_request_session: master session id: 2
debug2: Received exit status from master 0
Shared connection to 10.31.42.118 closed.
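
Note: two artifacts in this output deserve a gloss. The GLib warning comes from libblockdev failing to load its optional nvme plugin (libbd_nvme.so.2 is not installed on the node) and is harmless to the check; the trailing "True" is the probe's actual answer. The check amounts to importing blivet on the managed node and testing for the grow-to-fill capability, roughly as below (the attribute probed here is an assumption; the real test may inspect something else):

    - name: Check that blivet supports PV grow to fill (illustrative probe)
      ansible.builtin.command:
        argv:
          - python3
          - -c
          - |
            import blivet.formats.lvmpv as lvmpv  # importing blivet triggers the plugin warning above
            # Assumption: support is inferred from an attribute on the PV format class.
            print(hasattr(lvmpv.LVMPhysicalVolume, "grow_to_fill"))
      register: blivet_grow_check  # hypothetical name
      changed_when: false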


TASK [Verify that PVs fill the whole devices when they should] *****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:73
Saturday 02 November 2024  18:58:12 -0400 (0:00:00.443)       0:04:43.371 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:83
Saturday 02 November 2024  18:58:12 -0400 (0:00:00.120)       0:04:43.491 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node2

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Saturday 02 November 2024  18:58:12 -0400 (0:00:00.088)       0:04:43.579 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Saturday 02 November 2024  18:58:12 -0400 (0:00:00.058)       0:04:43.638 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Saturday 02 November 2024  18:58:12 -0400 (0:00:00.068)       0:04:43.706 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Saturday 02 November 2024  18:58:12 -0400 (0:00:00.077)       0:04:43.784 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Saturday 02 November 2024  18:58:12 -0400 (0:00:00.077)       0:04:43.861 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Saturday 02 November 2024  18:58:13 -0400 (0:00:00.076)       0:04:43.938 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Saturday 02 November 2024  18:58:13 -0400 (0:00:00.059)       0:04:43.998 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55
Saturday 02 November 2024  18:58:13 -0400 (0:00:00.056)       0:04:44.054 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64
Saturday 02 November 2024  18:58:13 -0400 (0:00:00.054)       0:04:44.108 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74
Saturday 02 November 2024  18:58:13 -0400 (0:00:00.049)       0:04:44.158 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83
Saturday 02 November 2024  18:58:13 -0400 (0:00:00.046)       0:04:44.204 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}

TASK [Check LVM RAID] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:86
Saturday 02 November 2024  18:58:13 -0400 (0:00:00.128)       0:04:44.332 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node2

TASK [Validate pool member LVM RAID settings] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Saturday 02 November 2024  18:58:13 -0400 (0:00:00.159)       0:04:44.491 ***** 
skipping: [managed-node2] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0', '_kernel_device': '/dev/dm-14', '_raw_kernel_device': '/dev/dm-14'})  => {
    "ansible_loop_var": "storage_test_lvmraid_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_lvmraid_volume": {
        "_device": "/dev/stratis/foo/test1",
        "_kernel_device": "/dev/dm-14",
        "_mount_id": "UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0",
        "_raw_device": "/dev/stratis/foo/test1",
        "_raw_kernel_device": "/dev/dm-14",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "present",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped
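
Note: "All items skipped" is Ansible's summary for a loop whose when clause was false for every item. The per-volume LVM RAID checks loop over the pool's volumes with loop_var storage_test_lvmraid_volume, and each iteration is gated on the pool being LVM, which a Stratis pool never satisfies. The shape of such a task (the included file's internals are not shown in this log, so the file name below is hypothetical):

    - name: Validate pool member LVM RAID settings (sketch of the skipped loop)
      ansible.builtin.include_tasks: verify-pool-member-lvmraid.yml  # hypothetical file name
      loop: "{{ storage_test_pool.volumes }}"
      loop_control:
        loop_var: storage_test_lvmraid_volume
      when: storage_test_pool.type == 'lvm'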

TASK [Check Thin Pools] ********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:89
Saturday 02 November 2024  18:58:13 -0400 (0:00:00.125)       0:04:44.617 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node2

TASK [Validate pool member thinpool settings] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2
Saturday 02 November 2024  18:58:13 -0400 (0:00:00.156)       0:04:44.774 ***** 
skipping: [managed-node2] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0', '_kernel_device': '/dev/dm-14', '_raw_kernel_device': '/dev/dm-14'})  => {
    "ansible_loop_var": "storage_test_thin_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_thin_volume": {
        "_device": "/dev/stratis/foo/test1",
        "_kernel_device": "/dev/dm-14",
        "_mount_id": "UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0",
        "_raw_device": "/dev/stratis/foo/test1",
        "_raw_kernel_device": "/dev/dm-14",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "present",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check member encryption] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:92
Saturday 02 November 2024  18:58:13 -0400 (0:00:00.119)       0:04:44.894 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node2

TASK [Set test variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Saturday 02 November 2024  18:58:14 -0400 (0:00:00.194)       0:04:45.088 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "1",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Saturday 02 November 2024  18:58:14 -0400 (0:00:00.115)       0:04:45.203 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Saturday 02 November 2024  18:58:14 -0400 (0:00:00.065)       0:04:45.269 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Saturday 02 November 2024  18:58:14 -0400 (0:00:00.083)       0:04:45.353 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:95
Saturday 02 November 2024  18:58:14 -0400 (0:00:00.170)       0:04:45.524 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node2

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Saturday 02 November 2024  18:58:15 -0400 (0:00:00.410)       0:04:45.934 ***** 
skipping: [managed-node2] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0', '_kernel_device': '/dev/dm-14', '_raw_kernel_device': '/dev/dm-14'})  => {
    "ansible_loop_var": "storage_test_vdo_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_vdo_volume": {
        "_device": "/dev/stratis/foo/test1",
        "_kernel_device": "/dev/dm-14",
        "_mount_id": "UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0",
        "_raw_device": "/dev/stratis/foo/test1",
        "_raw_kernel_device": "/dev/dm-14",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "present",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:98
Saturday 02 November 2024  18:58:15 -0400 (0:00:00.095)       0:04:46.030 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node2

TASK [Run 'stratis report'] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Saturday 02 November 2024  18:58:15 -0400 (0:00:00.267)       0:04:46.298 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "stratis",
        "report"
    ],
    "delta": "0:00:00.335032",
    "end": "2024-11-02 18:58:16.171612",
    "rc": 0,
    "start": "2024-11-02 18:58:15.836580"
}

STDOUT:

{
    "name_to_pool_uuid_map": {},
    "partially_constructed_pools": [],
    "path_to_ids_map": {},
    "pools": [
        {
            "available_actions": "fully_operational",
            "blockdevs": {
                "cachedevs": [],
                "datadevs": [
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                        "in_use": true,
                        "key_description": "blivet-foo",
                        "path": "/dev/sda",
                        "size": "20938752 sectors",
                        "uuid": "74ecc886-2c82-4e77-abda-faebaa43c474"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                        "in_use": true,
                        "key_description": "blivet-foo",
                        "path": "/dev/sdb",
                        "size": "20938752 sectors",
                        "uuid": "19e44d27-b955-49c0-88a5-6aa7e375e9ad"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                        "in_use": true,
                        "key_description": "blivet-foo",
                        "path": "/dev/sdc",
                        "size": "20938752 sectors",
                        "uuid": "85599f9f-945f-4d34-ab2c-646d3a692085"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                        "in_use": true,
                        "key_description": "blivet-foo",
                        "path": "/dev/sdd",
                        "size": "2147450880 sectors",
                        "uuid": "720d2792-bc9c-426e-9edb-792054c8825f"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                        "in_use": false,
                        "key_description": "blivet-foo",
                        "path": "/dev/sde",
                        "size": "2147450880 sectors",
                        "uuid": "12c040f9-9ef5-48a6-87c0-626aa570ad34"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                        "in_use": false,
                        "key_description": "blivet-foo",
                        "path": "/dev/sdf",
                        "size": "20938752 sectors",
                        "uuid": "c87682af-f4f6-4f61-8ca2-579c183a9c17"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                        "in_use": false,
                        "key_description": "blivet-foo",
                        "path": "/dev/sdg",
                        "size": "2147450880 sectors",
                        "uuid": "38e8e1a2-4c83-43df-9695-accb23200b24"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                        "in_use": false,
                        "key_description": "blivet-foo",
                        "path": "/dev/sdh",
                        "size": "20938752 sectors",
                        "uuid": "daccb6fb-36d5-46a3-ba99-12622586c190"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                        "in_use": false,
                        "key_description": "blivet-foo",
                        "path": "/dev/sdi",
                        "size": "20938752 sectors",
                        "uuid": "2c7cd33c-4d04-4c9d-95ed-699efbde2b67"
                    }
                ]
            },
            "filesystems": [
                {
                    "name": "test1",
                    "origin": "Not set",
                    "size": "8388608 sectors",
                    "size_limit": "Not set",
                    "used": "72351744 bytes",
                    "uuid": "cc0ea903-7499-49e5-9db5-ea7601b8b1c0"
                }
            ],
            "fs_limit": 100,
            "name": "foo",
            "uuid": "af4d9e1f-2295-42be-8575-58097d9869c6"
        }
    ],
    "stopped_pools": []
}
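
Note: sizes in 'stratis report' are given in 512-byte sectors. The test1 filesystem's 8388608 sectors is exactly 4 GiB, matching the requested size of '4g'; the datadevs at 20938752 and 2147450880 sectors are the 10G and 1T disks less LUKS/Stratis metadata. All nine disks carry the pool's key_description (blivet-foo), and in_use is true only for sda through sdd here. A quick way to summarize such a report, as an illustrative task (assumes jq is installed; the register name is hypothetical):

    - name: Summarize stratis pools and filesystems (illustrative)
      ansible.builtin.shell: |
        set -o pipefail
        stratis report | jq -r '.pools[] | "\(.name): \(.filesystems[].name)"'
      args:
        executable: /bin/bash
      register: stratis_summary  # hypothetical name
      changed_when: false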

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:11
Saturday 02 November 2024  18:58:16 -0400 (0:00:00.918)       0:04:47.217 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_stratis_pool_info": {
            "name_to_pool_uuid_map": {},
            "partially_constructed_pools": [],
            "path_to_ids_map": {},
            "pools": [
                {
                    "available_actions": "fully_operational",
                    "blockdevs": {
                        "cachedevs": [],
                        "datadevs": [
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                                "in_use": true,
                                "key_description": "blivet-foo",
                                "path": "/dev/sda",
                                "size": "20938752 sectors",
                                "uuid": "74ecc886-2c82-4e77-abda-faebaa43c474"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                                "in_use": true,
                                "key_description": "blivet-foo",
                                "path": "/dev/sdb",
                                "size": "20938752 sectors",
                                "uuid": "19e44d27-b955-49c0-88a5-6aa7e375e9ad"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                                "in_use": true,
                                "key_description": "blivet-foo",
                                "path": "/dev/sdc",
                                "size": "20938752 sectors",
                                "uuid": "85599f9f-945f-4d34-ab2c-646d3a692085"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                                "in_use": true,
                                "key_description": "blivet-foo",
                                "path": "/dev/sdd",
                                "size": "2147450880 sectors",
                                "uuid": "720d2792-bc9c-426e-9edb-792054c8825f"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                                "in_use": false,
                                "key_description": "blivet-foo",
                                "path": "/dev/sde",
                                "size": "2147450880 sectors",
                                "uuid": "12c040f9-9ef5-48a6-87c0-626aa570ad34"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                                "in_use": false,
                                "key_description": "blivet-foo",
                                "path": "/dev/sdf",
                                "size": "20938752 sectors",
                                "uuid": "c87682af-f4f6-4f61-8ca2-579c183a9c17"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                                "in_use": false,
                                "key_description": "blivet-foo",
                                "path": "/dev/sdg",
                                "size": "2147450880 sectors",
                                "uuid": "38e8e1a2-4c83-43df-9695-accb23200b24"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                                "in_use": false,
                                "key_description": "blivet-foo",
                                "path": "/dev/sdh",
                                "size": "20938752 sectors",
                                "uuid": "daccb6fb-36d5-46a3-ba99-12622586c190"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                                "in_use": false,
                                "key_description": "blivet-foo",
                                "path": "/dev/sdi",
                                "size": "20938752 sectors",
                                "uuid": "2c7cd33c-4d04-4c9d-95ed-699efbde2b67"
                            }
                        ]
                    },
                    "filesystems": [
                        {
                            "name": "test1",
                            "origin": "Not set",
                            "size": "8388608 sectors",
                            "size_limit": "Not set",
                            "used": "72351744 bytes",
                            "uuid": "cc0ea903-7499-49e5-9db5-ea7601b8b1c0"
                        }
                    ],
                    "fs_limit": 100,
                    "name": "foo",
                    "uuid": "af4d9e1f-2295-42be-8575-58097d9869c6"
                }
            ],
            "stopped_pools": []
        }
    },
    "changed": false
}

TASK [Verify that the pool was created] ****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Saturday 02 November 2024  18:58:16 -0400 (0:00:00.144)       0:04:47.361 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed
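
Note: the assertion behind this "ok" compares the pool spec against the _stratis_pool_info fact gathered just above; at minimum it has to find exactly one reported pool with the expected name. A sketch of that shape (the test's actual assert at verify-pool-stratis.yml:15 may check more fields):

    - name: Verify that the pool was created (sketch of the assertion)
      ansible.builtin.assert:
        that:
          - "_stratis_pool_info.pools | selectattr('name', 'equalto', storage_test_pool.name) | list | length == 1"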

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:25
Saturday 02 November 2024  18:58:16 -0400 (0:00:00.193)       0:04:47.555 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:34
Saturday 02 November 2024  18:58:16 -0400 (0:00:00.147)       0:04:47.702 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.encryption_clevis_pin == 'tang'",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:44
Saturday 02 November 2024  18:58:16 -0400 (0:00:00.150)       0:04:47.853 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:101
Saturday 02 November 2024  18:58:17 -0400 (0:00:00.141)       0:04:47.995 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Saturday 02 November 2024  18:58:17 -0400 (0:00:00.160)       0:04:48.156 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0', '_kernel_device': '/dev/dm-14', '_raw_kernel_device': '/dev/dm-14'})

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Saturday 02 November 2024  18:58:17 -0400 (0:00:00.301)       0:04:48.457 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_volume_present": true,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}
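
Note: the eight subset names set here drive the eight include lines that follow. The next task loops over _storage_volume_tests and includes test-verify-volume-<subset>.yml for each one, which is also why its banner still shows the unrendered {{ storage_test_volume_subset }} template: task names are rendered before the loop variable exists. Reconstructed, the driver looks roughly like:

    - name: Run test verify for {{ storage_test_volume_subset }}
      ansible.builtin.include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
      loop: "{{ _storage_volume_tests }}"
      loop_control:
        loop_var: storage_test_volume_subset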

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Saturday 02 November 2024  18:58:17 -0400 (0:00:00.132)       0:04:48.589 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node2 => (item=mount)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node2 => (item=fstab)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node2 => (item=fs)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node2 => (item=device)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node2 => (item=encryption)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node2 => (item=md)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node2 => (item=size)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node2 => (item=cache)

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Saturday 02 November 2024  18:58:18 -0400 (0:00:00.580)       0:04:49.170 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/stratis/foo/test1"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Saturday 02 November 2024  18:58:18 -0400 (0:00:00.136)       0:04:49.306 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "/opt/test1",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Saturday 02 November 2024  18:58:18 -0400 (0:00:00.230)       0:04:49.537 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Saturday 02 November 2024  18:58:18 -0400 (0:00:00.134)       0:04:49.671 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Saturday 02 November 2024  18:58:18 -0400 (0:00:00.149)       0:04:49.821 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Saturday 02 November 2024  18:58:19 -0400 (0:00:00.100)       0:04:49.922 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Saturday 02 November 2024  18:58:19 -0400 (0:00:00.124)       0:04:50.046 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode",
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Saturday 02 November 2024  18:58:19 -0400 (0:00:00.129)       0:04:50.176 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Saturday 02 November 2024  18:58:19 -0400 (0:00:00.121)       0:04:50.297 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Saturday 02 November 2024  18:58:19 -0400 (0:00:00.127)       0:04:50.425 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Saturday 02 November 2024  18:58:19 -0400 (0:00:00.296)       0:04:50.721 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Saturday 02 November 2024  18:58:19 -0400 (0:00:00.124)       0:04:50.846 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "1",
        "storage_test_fstab_expected_mount_options_matches": "1",
        "storage_test_fstab_expected_mount_point_matches": "1",
        "storage_test_fstab_id_matches": [
            "UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0 "
        ],
        "storage_test_fstab_mount_options_matches": [
            " /opt/test1 xfs defaults "
        ],
        "storage_test_fstab_mount_point_matches": [
            " /opt/test1 "
        ]
    },
    "changed": false
}
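
Note: the three expected counts are all "1", and the *_matches lists hold the raw substrings found in the fstab content captured earlier: the mount id with a trailing space, and the mount point padded by spaces on both sides so that /opt/test1 cannot false-match /opt/test10. A plausible reconstruction of how these facts are computed (exact escaping in the test may differ):

    - name: Set some variables for fstab checking (reconstruction)
      ansible.builtin.set_fact:
        storage_test_fstab_id_matches: "{{ storage_test_fstab.stdout | regex_findall(storage_test_volume._mount_id + ' ') }}"
        storage_test_fstab_mount_point_matches: "{{ storage_test_fstab.stdout | regex_findall(' ' + storage_test_volume.mount_point + ' ') }}"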

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Saturday 02 November 2024  18:58:20 -0400 (0:00:00.146)       0:04:50.993 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Saturday 02 November 2024  18:58:20 -0400 (0:00:00.165)       0:04:51.158 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Saturday 02 November 2024  18:58:20 -0400 (0:00:00.149)       0:04:51.308 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__storage_verify_mount_options | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Saturday 02 November 2024  18:58:20 -0400 (0:00:00.173)       0:04:51.481 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:51
Saturday 02 November 2024  18:58:20 -0400 (0:00:00.085)       0:04:51.567 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Saturday 02 November 2024  18:58:20 -0400 (0:00:00.093)       0:04:51.660 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type != \"stratis\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Saturday 02 November 2024  18:58:20 -0400 (0:00:00.110)       0:04:51.771 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type != \"stratis\"",
    "skip_reason": "Conditional result was False"
}

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Saturday 02 November 2024  18:58:20 -0400 (0:00:00.088)       0:04:51.859 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730588282.2885692,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "ctime": 1730588282.2885692,
        "dev": 5,
        "device_type": 64782,
        "executable": false,
        "exists": true,
        "gid": 6,
        "gr_name": "disk",
        "inode": 6315,
        "isblk": true,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": false,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/symlink",
        "mode": "0660",
        "mtime": 1730588282.2885692,
        "nlink": 1,
        "path": "/dev/stratis/foo/test1",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": null,
        "wgrp": true,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
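
(The stat result reports exists: true and isblk: true for /dev/stratis/foo/test1, so the assertions that follow pass. A condensed sketch of that presence check, with the register name storage_test_dev assumed for illustration; _device is the volume attribute shown elsewhere in this log:)

    - name: See whether the device node is present
      ansible.builtin.stat:
        path: "{{ storage_test_volume._device }}"
      register: storage_test_dev  # register name assumed

    - name: Verify the presence/absence of the device node
      ansible.builtin.assert:
        that:
          - storage_test_dev.stat.exists
          - storage_test_dev.stat.isblk
        fail_msg: "Expected a block device node at {{ storage_test_volume._device }}"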

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Saturday 02 November 2024  18:58:21 -0400 (0:00:00.605)       0:04:52.464 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Saturday 02 November 2024  18:58:21 -0400 (0:00:00.115)       0:04:52.580 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Saturday 02 November 2024  18:58:21 -0400 (0:00:00.088)       0:04:52.669 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Saturday 02 November 2024  18:58:21 -0400 (0:00:00.227)       0:04:52.897 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "st_volume_type": "stratis"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Saturday 02 November 2024  18:58:22 -0400 (0:00:00.085)       0:04:52.982 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"raid\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Saturday 02 November 2024  18:58:22 -0400 (0:00:00.103)       0:04:53.086 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Saturday 02 November 2024  18:58:22 -0400 (0:00:00.084)       0:04:53.171 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Saturday 02 November 2024  18:58:22 -0400 (0:00:00.078)       0:04:53.249 ***** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: cryptsetup
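
(The dnf result above, "Nothing to do" with rc 0 and empty results, means cryptsetup was already installed; the task only guarantees the tool is available for the LUKS checks that follow. A minimal equivalent, with the module choice assumed:)

    - name: Ensure cryptsetup is present
      ansible.builtin.package:
        name: cryptsetup
        state: present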

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Saturday 02 November 2024  18:58:23 -0400 (0:00:01.369)       0:04:54.618 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Saturday 02 November 2024  18:58:23 -0400 (0:00:00.081)       0:04:54.700 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Saturday 02 November 2024  18:58:23 -0400 (0:00:00.066)       0:04:54.766 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Saturday 02 November 2024  18:58:23 -0400 (0:00:00.081)       0:04:54.848 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Saturday 02 November 2024  18:58:23 -0400 (0:00:00.045)       0:04:54.893 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Saturday 02 November 2024  18:58:24 -0400 (0:00:00.044)       0:04:54.937 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Saturday 02 November 2024  18:58:24 -0400 (0:00:00.049)       0:04:54.986 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Saturday 02 November 2024  18:58:24 -0400 (0:00:00.075)       0:04:55.062 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Saturday 02 November 2024  18:58:24 -0400 (0:00:00.076)       0:04:55.139 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Saturday 02 November 2024  18:58:24 -0400 (0:00:00.290)       0:04:55.430 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed
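
(Since the volume itself is not encrypted, _storage_test_crypttab_entries is empty and the expected count is "0", so this check passes trivially. The comparison amounts to the following sketch; note the expected count is stored as a string in the facts above, hence the int cast:)

    - name: Check for /etc/crypttab entry (sketch)
      ansible.builtin.assert:
        that: >-
          _storage_test_crypttab_entries | length ==
          _storage_test_expected_crypttab_entries | int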

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Saturday 02 November 2024  18:58:24 -0400 (0:00:00.104)       0:04:55.534 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Saturday 02 November 2024  18:58:24 -0400 (0:00:00.080)       0:04:55.614 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Saturday 02 November 2024  18:58:24 -0400 (0:00:00.066)       0:04:55.680 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Saturday 02 November 2024  18:58:24 -0400 (0:00:00.066)       0:04:55.746 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Saturday 02 November 2024  18:58:24 -0400 (0:00:00.062)       0:04:55.809 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Saturday 02 November 2024  18:58:24 -0400 (0:00:00.047)       0:04:55.856 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Saturday 02 November 2024  18:58:25 -0400 (0:00:00.046)       0:04:55.903 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Saturday 02 November 2024  18:58:25 -0400 (0:00:00.045)       0:04:55.949 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Saturday 02 November 2024  18:58:25 -0400 (0:00:00.046)       0:04:55.995 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Saturday 02 November 2024  18:58:25 -0400 (0:00:00.046)       0:04:56.042 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Saturday 02 November 2024  18:58:25 -0400 (0:00:00.044)       0:04:56.086 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Saturday 02 November 2024  18:58:25 -0400 (0:00:00.046)       0:04:56.132 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Saturday 02 November 2024  18:58:25 -0400 (0:00:00.106)       0:04:56.239 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Saturday 02 November 2024  18:58:25 -0400 (0:00:00.047)       0:04:56.286 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Saturday 02 November 2024  18:58:25 -0400 (0:00:00.048)       0:04:56.335 ***** 
ok: [managed-node2] => {
    "bytes": 4294967296,
    "changed": false,
    "lvm": "4g",
    "parted": "4GiB",
    "size": "4 GiB"
}
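
(The parsed values are consistent: the requested "4g" means 4 GiB, i.e. 4 * 1024^3 = 4294967296 bytes, which is exactly what the bytes field reports. The same conversion can be reproduced with the core human_to_bytes filter; the fact name expected_bytes is illustrative:)

    - name: Convert a human-readable size to bytes (sketch)
      ansible.builtin.set_fact:
        expected_bytes: "{{ '4G' | ansible.builtin.human_to_bytes }}"  # -> 4294967296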

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Saturday 02 November 2024  18:58:25 -0400 (0:00:00.439)       0:04:56.774 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Saturday 02 November 2024  18:58:25 -0400 (0:00:00.124)       0:04:56.898 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Saturday 02 November 2024  18:58:26 -0400 (0:00:00.116)       0:04:57.015 ***** 
ok: [managed-node2] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}
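
(The "VARIABLE IS NOT DEFINED!" text is the debug module's normal rendering of an undefined variable, not a failure: storage_test_expected_size is only set by the lvm-specific tasks, and all of those were skipped because this volume's type is stratis. The task behind this output is a plain debug of the variable, roughly:)

    - name: Show expected size
      ansible.builtin.debug:
        var: storage_test_expected_size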

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Saturday 02 November 2024  18:58:26 -0400 (0:00:00.107)       0:04:57.123 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Saturday 02 November 2024  18:58:26 -0400 (0:00:00.096)       0:04:57.219 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Saturday 02 November 2024  18:58:26 -0400 (0:00:00.069)       0:04:57.289 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Saturday 02 November 2024  18:58:26 -0400 (0:00:00.061)       0:04:57.351 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Saturday 02 November 2024  18:58:26 -0400 (0:00:00.067)       0:04:57.419 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Saturday 02 November 2024  18:58:26 -0400 (0:00:00.088)       0:04:57.507 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Saturday 02 November 2024  18:58:26 -0400 (0:00:00.083)       0:04:57.591 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Saturday 02 November 2024  18:58:26 -0400 (0:00:00.292)       0:04:57.884 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Saturday 02 November 2024  18:58:27 -0400 (0:00:00.105)       0:04:57.990 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Saturday 02 November 2024  18:58:27 -0400 (0:00:00.079)       0:04:58.070 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Saturday 02 November 2024  18:58:27 -0400 (0:00:00.081)       0:04:58.151 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Saturday 02 November 2024  18:58:27 -0400 (0:00:00.078)       0:04:58.230 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Saturday 02 November 2024  18:58:27 -0400 (0:00:00.070)       0:04:58.300 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Saturday 02 November 2024  18:58:27 -0400 (0:00:00.050)       0:04:58.350 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Saturday 02 November 2024  18:58:27 -0400 (0:00:00.046)       0:04:58.397 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Saturday 02 November 2024  18:58:27 -0400 (0:00:00.046)       0:04:58.444 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Saturday 02 November 2024  18:58:27 -0400 (0:00:00.046)       0:04:58.491 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Saturday 02 November 2024  18:58:27 -0400 (0:00:00.047)       0:04:58.538 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Saturday 02 November 2024  18:58:27 -0400 (0:00:00.046)       0:04:58.585 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Saturday 02 November 2024  18:58:27 -0400 (0:00:00.046)       0:04:58.631 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Saturday 02 November 2024  18:58:27 -0400 (0:00:00.046)       0:04:58.678 ***** 
ok: [managed-node2] => {
    "storage_test_actual_size": {
        "bytes": 4294967296,
        "changed": false,
        "failed": false,
        "lvm": "4g",
        "parted": "4GiB",
        "size": "4 GiB"
    }
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Saturday 02 November 2024  18:58:27 -0400 (0:00:00.199)       0:04:58.878 ***** 
ok: [managed-node2] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Saturday 02 November 2024  18:58:28 -0400 (0:00:00.095)       0:04:58.973 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Saturday 02 November 2024  18:58:28 -0400 (0:00:00.087)       0:04:59.061 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Saturday 02 November 2024  18:58:28 -0400 (0:00:00.060)       0:04:59.121 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Saturday 02 November 2024  18:58:28 -0400 (0:00:00.057)       0:04:59.178 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Saturday 02 November 2024  18:58:28 -0400 (0:00:00.047)       0:04:59.226 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Saturday 02 November 2024  18:58:28 -0400 (0:00:00.051)       0:04:59.278 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Saturday 02 November 2024  18:58:28 -0400 (0:00:00.052)       0:04:59.330 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Saturday 02 November 2024  18:58:28 -0400 (0:00:00.072)       0:04:59.403 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Saturday 02 November 2024  18:58:28 -0400 (0:00:00.135)       0:04:59.538 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Saturday 02 November 2024  18:58:28 -0400 (0:00:00.169)       0:04:59.708 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Saturday 02 November 2024  18:58:28 -0400 (0:00:00.081)       0:04:59.790 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Repeat the previous invocation to verify idempotence] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:154
Saturday 02 November 2024  18:58:28 -0400 (0:00:00.088)       0:04:59.878 ***** 
included: fedora.linux_system_roles.storage for managed-node2

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Saturday 02 November 2024  18:58:29 -0400 (0:00:00.212)       0:05:00.091 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Saturday 02 November 2024  18:58:29 -0400 (0:00:00.074)       0:05:00.166 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Saturday 02 November 2024  18:58:29 -0400 (0:00:00.074)       0:05:00.240 ***** 
skipping: [managed-node2] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node2] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}
ok: [managed-node2] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}
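
(CentOS_9.yml is loaded twice because the role's candidate list of platform vars files likely contains both a distribution-plus-major-version name and a distribution-plus-full-version name, and on this host both resolve to CentOS_9.yml; the second include re-sets the same facts and is a harmless no-op. Roughly, with the exact candidate list in set_vars.yml assumed:)

    # candidate vars files, most generic first (sketch)
    - RedHat.yml     # os family                    -> skipped, file not present
    - CentOS.yml     # distribution                 -> skipped, file not present
    - CentOS_9.yml   # distribution + major version -> loaded
    - CentOS_9.yml   # distribution + version       -> loaded again, no-op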

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Saturday 02 November 2024  18:58:29 -0400 (0:00:00.112)       0:05:00.353 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Saturday 02 November 2024  18:58:29 -0400 (0:00:00.048)       0:05:00.401 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Saturday 02 November 2024  18:58:29 -0400 (0:00:00.047)       0:05:00.449 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Saturday 02 November 2024  18:58:29 -0400 (0:00:00.049)       0:05:00.498 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Saturday 02 November 2024  18:58:29 -0400 (0:00:00.050)       0:05:00.549 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Saturday 02 November 2024  18:58:29 -0400 (0:00:00.119)       0:05:00.668 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Saturday 02 November 2024  18:58:29 -0400 (0:00:00.065)       0:05:00.734 ***** 
ok: [managed-node2] => {
    "storage_pools": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "encryption": true,
            "encryption_password": "yabbadabbadoo",
            "name": "foo",
            "type": "stratis",
            "volumes": [
                {
                    "mount_point": "/opt/test1",
                    "name": "test1",
                    "size": "4g"
                }
            ]
        }
    ]
}
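
(This is the pool specification the test feeds back to the role unchanged. Running the role directly with the same configuration would look like the following sketch; all values are copied from the output above, and "yabbadabbadoo" is the test's dummy passphrase:)

    - name: Manage an encrypted Stratis pool with one 4g filesystem (sketch)
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            type: stratis
            disks: [sda, sdb, sdc, sdd, sde, sdf, sdg, sdh, sdi]
            encryption: true
            encryption_password: yabbadabbadoo
            volumes:
              - name: test1
                size: 4g
                mount_point: /opt/test1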

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Saturday 02 November 2024  18:58:29 -0400 (0:00:00.135)       0:05:00.870 ***** 
ok: [managed-node2] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Saturday 02 November 2024  18:58:30 -0400 (0:00:00.052)       0:05:00.922 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Saturday 02 November 2024  18:58:30 -0400 (0:00:00.050)       0:05:00.973 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Saturday 02 November 2024  18:58:30 -0400 (0:00:00.067)       0:05:01.040 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Saturday 02 November 2024  18:58:30 -0400 (0:00:00.080)       0:05:01.121 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Saturday 02 November 2024  18:58:30 -0400 (0:00:00.084)       0:05:01.206 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Saturday 02 November 2024  18:58:30 -0400 (0:00:00.097)       0:05:01.304 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Saturday 02 November 2024  18:58:30 -0400 (0:00:00.052)       0:05:01.356 ***** 
ok: [managed-node2] => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [
        "/dev/stratis/foo/test1",
        "/dev/xvda1"
    ],
    "mounts": [
        {
            "dump": 0,
            "fstype": "xfs",
            "group": null,
            "mode": null,
            "opts": "defaults",
            "owner": null,
            "passno": 0,
            "path": "/opt/test1",
            "src": "UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0",
            "state": "mounted"
        }
    ],
    "packages": [
        "xfsprogs",
        "stratisd",
        "stratis-cli"
    ],
    "pools": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "encryption": true,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "stratis",
            "volumes": [
                {
                    "_device": "/dev/stratis/foo/test1",
                    "_kernel_device": "/dev/dm-14",
                    "_mount_id": "UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0",
                    "_raw_device": "/dev/stratis/foo/test1",
                    "_raw_kernel_device": "/dev/dm-14",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda",
                        "sdb",
                        "sdc",
                        "sdd",
                        "sde",
                        "sdf",
                        "sdg",
                        "sdh",
                        "sdi"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "stratis",
                    "vdo_pool_size": null
                }
            ]
        }
    ],
    "volumes": []
}
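
(On this repeat invocation the module returns actions: [] and changed: false: the pool and filesystem already match the requested state, which is the idempotence property this part of the test verifies. A minimal check over the registered result; the register name blivet_output is taken from the tasks below:)

    - name: Assert the second run changed nothing (sketch)
      ansible.builtin.assert:
        that:
          - blivet_output is not changed
          - blivet_output.actions | length == 0
        fail_msg: "Role was not idempotent: {{ blivet_output.actions }}"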

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Saturday 02 November 2024  18:58:34 -0400 (0:00:04.083)       0:05:05.440 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Saturday 02 November 2024  18:58:34 -0400 (0:00:00.140)       0:05:05.580 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730588285.4116027,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "313e5700286f4a87ad8514864f4abd254352f126",
        "ctime": 1730588285.4076028,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 415236291,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1730588285.4076028,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1436,
        "uid": 0,
        "version": "3857707169",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Saturday 02 November 2024  18:58:35 -0400 (0:00:00.573)       0:05:06.155 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "blivet_output is changed",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Saturday 02 November 2024  18:58:35 -0400 (0:00:00.117)       0:05:06.273 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Saturday 02 November 2024  18:58:35 -0400 (0:00:00.082)       0:05:06.355 ***** 
ok: [managed-node2] => {
    "blivet_output": {
        "actions": [],
        "changed": false,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/stratis/foo/test1",
            "/dev/xvda1"
        ],
        "mounts": [
            {
                "dump": 0,
                "fstype": "xfs",
                "group": null,
                "mode": null,
                "opts": "defaults",
                "owner": null,
                "passno": 0,
                "path": "/opt/test1",
                "src": "UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0",
                "state": "mounted"
            }
        ],
        "packages": [
            "xfsprogs",
            "stratisd",
            "stratis-cli"
        ],
        "pools": [
            {
                "disks": [
                    "sda",
                    "sdb",
                    "sdc",
                    "sdd",
                    "sde",
                    "sdf",
                    "sdg",
                    "sdh",
                    "sdi"
                ],
                "encryption": true,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "stratis",
                "volumes": [
                    {
                        "_device": "/dev/stratis/foo/test1",
                        "_kernel_device": "/dev/dm-14",
                        "_mount_id": "UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0",
                        "_raw_device": "/dev/stratis/foo/test1",
                        "_raw_kernel_device": "/dev/dm-14",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda",
                            "sdb",
                            "sdc",
                            "sdd",
                            "sde",
                            "sdf",
                            "sdg",
                            "sdh",
                            "sdi"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "stratis",
                        "vdo_pool_size": null
                    }
                ]
            }
        ],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Saturday 02 November 2024  18:58:35 -0400 (0:00:00.155)       0:05:06.510 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda",
                    "sdb",
                    "sdc",
                    "sdd",
                    "sde",
                    "sdf",
                    "sdg",
                    "sdh",
                    "sdi"
                ],
                "encryption": true,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "stratis",
                "volumes": [
                    {
                        "_device": "/dev/stratis/foo/test1",
                        "_kernel_device": "/dev/dm-14",
                        "_mount_id": "UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0",
                        "_raw_device": "/dev/stratis/foo/test1",
                        "_raw_kernel_device": "/dev/dm-14",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda",
                            "sdb",
                            "sdc",
                            "sdd",
                            "sde",
                            "sdf",
                            "sdg",
                            "sdh",
                            "sdi"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "stratis",
                        "vdo_pool_size": null
                    }
                ]
            }
        ]
    },
    "changed": false
}
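
The task above snapshots the role's final pool configuration into the `_storage_pools_list` fact so the verification tasks that follow can compare it against the actual system state. A minimal sketch of the pattern (the `storage_result` register name is illustrative, not the role's internal variable):

    - name: Set the list of pools for test verification
      ansible.builtin.set_fact:
        _storage_pools_list: "{{ storage_result.pools }}"  # pools as returned by the role's blivet module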

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Saturday 02 November 2024  18:58:35 -0400 (0:00:00.066)       0:05:06.577 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Saturday 02 November 2024  18:58:35 -0400 (0:00:00.050)       0:05:06.628 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Saturday 02 November 2024  18:58:35 -0400 (0:00:00.068)       0:05:06.696 ***** 
ok: [managed-node2] => {
    "changed": false,
    "name": null,
    "status": {}
}
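
This refresh is the standard `daemon_reload` call, needed because systemd generates mount units from /etc/fstab and must re-read the file after it changes. A minimal sketch:

    - name: Tell systemd to refresh its view of /etc/fstab
      ansible.builtin.systemd_service:
        daemon_reload: true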

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Saturday 02 November 2024  18:58:36 -0400 (0:00:00.790)       0:05:07.486 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
ok: [managed-node2] => (item={'src': 'UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": false,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "xfs",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0",
        "state": "mounted"
    },
    "name": "/opt/test1",
    "opts": "defaults",
    "passno": "0",
    "src": "UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0"
}
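
The loop item above maps one-to-one onto `ansible.posix.mount` parameters. A standalone equivalent for this volume, with the UUID and mount point taken from the output:

    - name: Mount the Stratis filesystem and persist it in /etc/fstab
      ansible.posix.mount:
        src: UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0
        path: /opt/test1
        fstype: xfs
        opts: defaults
        state: mounted   # mounts now and writes the fstab entry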

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Saturday 02 November 2024  18:58:37 -0400 (0:00:00.589)       0:05:08.076 ***** 
skipping: [managed-node2] => (item={'src': 'UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None})  => {
    "ansible_loop_var": "mount_info",
    "changed": false,
    "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0",
        "state": "mounted"
    },
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped
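
The skip is expected: `owner`, `group`, and `mode` are all null in the mount item, so the `false_condition` shown above never holds. When any of them is set, the equivalent step is an `ansible.builtin.file` call on the mount point; a sketch using the same condition the role evaluates:

    - name: Manage mount ownership/permissions
      ansible.builtin.file:
        path: "{{ mount_info['path'] }}"
        state: directory
        owner: "{{ mount_info['owner'] | default(omit, true) }}"
        group: "{{ mount_info['group'] | default(omit, true) }}"
        mode: "{{ mount_info['mode'] | default(omit, true) }}"
      when: >-
        mount_info['owner'] != none or
        mount_info['group'] != none or
        mount_info['mode'] != none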

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Saturday 02 November 2024  18:58:37 -0400 (0:00:00.076)       0:05:08.152 ***** 
ok: [managed-node2] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Saturday 02 November 2024  18:58:38 -0400 (0:00:00.766)       0:05:08.919 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730587881.7952998,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1730587879.2202723,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 104857860,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1730587879.2212098,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "3381617552",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
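
These facts come from `ansible.builtin.stat`. Note that `size` is 0 and the checksum is `da39a3ee…`, the SHA-1 of empty input, so /etc/crypttab exists but is empty, consistent with Stratis managing its own LUKS layer rather than relying on crypttab. A sketch of the retrieval (register name illustrative):

    - name: Retrieve facts for the /etc/crypttab file
      ansible.builtin.stat:
        path: /etc/crypttab
      register: storage_test_crypttab_stat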

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Saturday 02 November 2024  18:58:38 -0400 (0:00:00.449)       0:05:09.369 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Saturday 02 November 2024  18:58:38 -0400 (0:00:00.065)       0:05:09.434 ***** 
ok: [managed-node2]
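
"Update facts" re-runs fact gathering so that mount and device facts reflect the changes the role just made; the equivalent standalone task is simply:

    - name: Update facts
      ansible.builtin.setup: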

TASK [Verify role results] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:169
Saturday 02 November 2024  18:58:39 -0400 (0:00:01.084)       0:05:10.519 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node2

TASK [Print out pool information] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Saturday 02 November 2024  18:58:39 -0400 (0:00:00.246)       0:05:10.766 ***** 
ok: [managed-node2] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "encryption": true,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "stratis",
            "volumes": [
                {
                    "_device": "/dev/stratis/foo/test1",
                    "_kernel_device": "/dev/dm-14",
                    "_mount_id": "UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0",
                    "_raw_device": "/dev/stratis/foo/test1",
                    "_raw_kernel_device": "/dev/dm-14",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda",
                        "sdb",
                        "sdc",
                        "sdd",
                        "sde",
                        "sdf",
                        "sdg",
                        "sdh",
                        "sdi"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "stratis",
                    "vdo_pool_size": null
                }
            ]
        }
    ]
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Saturday 02 November 2024  18:58:39 -0400 (0:00:00.126)       0:05:10.893 ***** 
skipping: [managed-node2] => {
    "false_condition": "_storage_volumes_list | length > 0"
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Saturday 02 November 2024  18:58:40 -0400 (0:00:00.120)       0:05:11.013 ***** 
ok: [managed-node2] => {
    "changed": false,
    "info": {
        "/dev/mapper/stratis-1-private-12c040f99ef548a687c0626aa570ad34-crypt": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-12c040f99ef548a687c0626aa570ad34-crypt",
            "size": "1024G",
            "type": "crypt",
            "uuid": "12c040f9-9ef5-48a6-87c0-626aa570ad34"
        },
        "/dev/mapper/stratis-1-private-19e44d27b95549c088a56aa7e375e9ad-crypt": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-19e44d27b95549c088a56aa7e375e9ad-crypt",
            "size": "10G",
            "type": "crypt",
            "uuid": "19e44d27-b955-49c0-88a5-6aa7e375e9ad"
        },
        "/dev/mapper/stratis-1-private-2c7cd33c4d044c9d95ed699efbde2b67-crypt": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-2c7cd33c4d044c9d95ed699efbde2b67-crypt",
            "size": "10G",
            "type": "crypt",
            "uuid": "2c7cd33c-4d04-4c9d-95ed-699efbde2b67"
        },
        "/dev/mapper/stratis-1-private-38e8e1a24c8343df9695accb23200b24-crypt": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-38e8e1a24c8343df9695accb23200b24-crypt",
            "size": "1024G",
            "type": "crypt",
            "uuid": "38e8e1a2-4c83-43df-9695-accb23200b24"
        },
        "/dev/mapper/stratis-1-private-720d2792bc9c426e9edb792054c8825f-crypt": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-720d2792bc9c426e9edb792054c8825f-crypt",
            "size": "1024G",
            "type": "crypt",
            "uuid": "720d2792-bc9c-426e-9edb-792054c8825f"
        },
        "/dev/mapper/stratis-1-private-74ecc8862c824e77abdafaebaa43c474-crypt": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-74ecc8862c824e77abdafaebaa43c474-crypt",
            "size": "10G",
            "type": "crypt",
            "uuid": "74ecc886-2c82-4e77-abda-faebaa43c474"
        },
        "/dev/mapper/stratis-1-private-85599f9f945f4d34ab2c646d3a692085-crypt": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-85599f9f945f4d34ab2c646d3a692085-crypt",
            "size": "10G",
            "type": "crypt",
            "uuid": "85599f9f-945f-4d34-ab2c-646d3a692085"
        },
        "/dev/mapper/stratis-1-private-af4d9e1f229542be857558097d9869c6-flex-mdv": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-af4d9e1f229542be857558097d9869c6-flex-mdv",
            "size": "512M",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-af4d9e1f229542be857558097d9869c6-flex-thindata": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-af4d9e1f229542be857558097d9869c6-flex-thindata",
            "size": "50G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-af4d9e1f229542be857558097d9869c6-flex-thinmeta": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-af4d9e1f229542be857558097d9869c6-flex-thinmeta",
            "size": "799M",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-af4d9e1f229542be857558097d9869c6-physical-originsub": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-af4d9e1f229542be857558097d9869c6-physical-originsub",
            "size": "52.1G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-af4d9e1f229542be857558097d9869c6-thinpool-pool": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-af4d9e1f229542be857558097d9869c6-thinpool-pool",
            "size": "50G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-c87682aff4f64f618ca2579c183a9c17-crypt": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-c87682aff4f64f618ca2579c183a9c17-crypt",
            "size": "10G",
            "type": "crypt",
            "uuid": "c87682af-f4f6-4f61-8ca2-579c183a9c17"
        },
        "/dev/mapper/stratis-1-private-daccb6fb36d546a3ba9912622586c190-crypt": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-daccb6fb36d546a3ba9912622586c190-crypt",
            "size": "10G",
            "type": "crypt",
            "uuid": "daccb6fb-36d5-46a3-ba99-12622586c190"
        },
        "/dev/sda": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": "8f2afe5e-5bc4-42a2-9c53-a75306158917"
        },
        "/dev/sdb": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": "091e64cf-d60b-4010-8a07-fba82fd55bf8"
        },
        "/dev/sdc": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": "7582367e-c2d7-4a18-bb1a-da4603daf18e"
        },
        "/dev/sdd": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": "e693543d-fd71-4c92-909c-18811f09ecc5"
        },
        "/dev/sde": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": "7a724e5b-21cd-484f-bbd0-d018b0435369"
        },
        "/dev/sdf": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": "d14cc918-a16c-4f82-86ed-7c95de8c10a8"
        },
        "/dev/sdg": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": "855a3859-de23-493c-9cc7-8f655714fc20"
        },
        "/dev/sdh": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": "76d67332-3bc1-4daf-815c-8dd505e1ad0b"
        },
        "/dev/sdi": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": "38b23489-c4f0-4785-928a-3c60d8180a86"
        },
        "/dev/stratis/foo/test1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/opt/test1",
            "name": "/dev/stratis/foo/test1",
            "size": "4G",
            "type": "stratis",
            "uuid": "cc0ea903-7499-49e5-9db5-ea7601b8b1c0"
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda1",
            "size": "250G",
            "type": "partition",
            "uuid": "d6f52a16-d77d-4c0d-9841-76b856654401"
        }
    }
}
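
The `info` mapping above holds one record per block device (name, fstype, label, mountpoint, size, type, uuid). Comparable data can be gathered directly with `lsblk` in JSON mode; a sketch only, since the test collects this through the role's own helper rather than a raw command:

    - name: Collect info about the volumes
      ansible.builtin.command:
        cmd: lsblk -J -o NAME,FSTYPE,LABEL,MOUNTPOINT,SIZE,TYPE,UUID
      register: storage_test_lsblk   # illustrative name
      changed_when: false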

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Saturday 02 November 2024  18:58:40 -0400 (0:00:00.457)       0:05:11.471 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.003316",
    "end": "2024-11-02 18:58:40.910259",
    "rc": 0,
    "start": "2024-11-02 18:58:40.906943"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Wed Oct 30 09:04:39 2024
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=d6f52a16-d77d-4c0d-9841-76b856654401 /                       xfs     defaults        0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0 /opt/test1 xfs defaults 0 0
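
A typical check on this output asserts that the volume's `_mount_id` appears exactly once. A minimal sketch (the register name is illustrative):

    - name: Verify that /opt/test1 appears exactly once in /etc/fstab
      ansible.builtin.assert:
        that:
          - >-
            storage_test_fstab.stdout_lines
            | select('search', 'cc0ea903-7499-49e5-9db5-ea7601b8b1c0')
            | list | length == 1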

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Saturday 02 November 2024  18:58:41 -0400 (0:00:00.442)       0:05:11.914 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.002926",
    "end": "2024-11-02 18:58:41.349412",
    "failed_when_result": false,
    "rc": 0,
    "start": "2024-11-02 18:58:41.346486"
}
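
`failed_when_result: false` shows the read is tolerant: failure handling is overridden so an absent or unreadable crypttab does not abort the run (here the file is present but empty, matching the stat result earlier). A sketch:

    - name: Read the /etc/crypttab file
      ansible.builtin.command:
        cmd: cat /etc/crypttab
      register: storage_test_crypttab   # illustrative name
      changed_when: false
      failed_when: false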

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Saturday 02 November 2024  18:58:41 -0400 (0:00:00.476)       0:05:12.390 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node2 => (item={'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'encryption': True, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER', 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'stratis', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0', '_kernel_device': '/dev/dm-14', '_raw_kernel_device': '/dev/dm-14'}]})

TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Saturday 02 November 2024  18:58:41 -0400 (0:00:00.281)       0:05:12.672 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Saturday 02 November 2024  18:58:41 -0400 (0:00:00.153)       0:05:12.825 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Saturday 02 November 2024  18:58:42 -0400 (0:00:00.110)       0:05:12.936 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}
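
Both VG shared checks are skipped because this pool is `stratis`, not `lvm`. For an LVM pool, the shared flag can be read from `vgs`; a sketch assuming the `shared` report field available in recent lvm2 (the field name and flags are an assumption, not taken from this log):

    - name: Get VG shared value status
      ansible.builtin.command:
        cmd: vgs --noheadings --binary -o shared {{ storage_test_pool.name | quote }}
      register: storage_test_vgs   # illustrative name
      changed_when: false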

TASK [Verify pool subset] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34
Saturday 02 November 2024  18:58:42 -0400 (0:00:00.126)       0:05:13.062 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node2 => (item=members)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node2 => (item=volumes)

TASK [Set test variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2
Saturday 02 November 2024  18:58:42 -0400 (0:00:00.279)       0:05:13.342 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Get the canonical device path for each member device] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8
Saturday 02 November 2024  18:58:42 -0400 (0:00:00.187)       0:05:13.529 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Set pvs lvm length] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17
Saturday 02 November 2024  18:58:42 -0400 (0:00:00.213)       0:05:13.743 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set pool pvs] ************************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22
Saturday 02 November 2024  18:58:42 -0400 (0:00:00.076)       0:05:13.819 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify PV count] *********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27
Saturday 02 November 2024  18:58:43 -0400 (0:00:00.114)       0:05:13.934 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36
Saturday 02 November 2024  18:58:43 -0400 (0:00:00.160)       0:05:14.094 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41
Saturday 02 November 2024  18:58:43 -0400 (0:00:00.126)       0:05:14.221 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and not storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46
Saturday 02 November 2024  18:58:43 -0400 (0:00:00.098)       0:05:14.320 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level",
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:51
Saturday 02 November 2024  18:58:43 -0400 (0:00:00.102)       0:05:14.423 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:64
Saturday 02 November 2024  18:58:43 -0400 (0:00:00.136)       0:05:14.559 ***** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0
}

STDOUT:


** (process:199523): WARNING **: 18:58:43.963: failed to load module nvme: libbd_nvme.so.2: cannot open shared object file: No such file or directory
True



STDERR:

OpenSSH_8.7p1, OpenSSL 3.2.2 4 Jun 2024
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.42.118 originally 10.31.42.118
debug2: match not found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: configuration requests final Match pass
debug2: resolve_canonicalize: hostname 10.31.42.118 is address
debug1: re-parsing configuration
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.42.118 originally 10.31.42.118
debug2: match found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: auto-mux: Trying existing master
debug2: fd 3 setting O_NONBLOCK
debug2: mux_client_hello_exchange: master version 4
debug1: mux_client_request_session: master session id: 2
debug2: Received exit status from master 0
Shared connection to 10.31.42.118 closed.
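
The probe prints `True` on its own line, so blivet on the managed node supports growing PVs to fill the device; the libblockdev warning about the missing nvme plugin is unrelated and harmless here. A sketch of such a probe, assuming the capability is detected by looking for a `grow_to_fill` parameter on blivet's LVMPhysicalVolume format (the attribute name is an assumption, not read from this log):

    - name: Check that blivet supports PV grow to fill
      ansible.builtin.command:
        cmd: >-
          python3 -c "import inspect;
          from blivet.formats.lvmpv import LVMPhysicalVolume;
          print('grow_to_fill' in inspect.signature(LVMPhysicalVolume.__init__).parameters)"
      register: storage_test_grow_support   # illustrative name
      changed_when: false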


TASK [Verify that PVs fill the whole devices when they should] *****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:73
Saturday 02 November 2024  18:58:44 -0400 (0:00:00.488)       0:05:15.047 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:83
Saturday 02 November 2024  18:58:44 -0400 (0:00:00.141)       0:05:15.189 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node2

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Saturday 02 November 2024  18:58:44 -0400 (0:00:00.171)       0:05:15.361 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Saturday 02 November 2024  18:58:44 -0400 (0:00:00.081)       0:05:15.443 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Saturday 02 November 2024  18:58:44 -0400 (0:00:00.195)       0:05:15.638 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Saturday 02 November 2024  18:58:44 -0400 (0:00:00.082)       0:05:15.720 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Saturday 02 November 2024  18:58:44 -0400 (0:00:00.082)       0:05:15.803 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Saturday 02 November 2024  18:58:44 -0400 (0:00:00.052)       0:05:15.856 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Saturday 02 November 2024  18:58:45 -0400 (0:00:00.058)       0:05:15.914 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55
Saturday 02 November 2024  18:58:45 -0400 (0:00:00.058)       0:05:15.972 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64
Saturday 02 November 2024  18:58:45 -0400 (0:00:00.047)       0:05:16.020 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74
Saturday 02 November 2024  18:58:45 -0400 (0:00:00.047)       0:05:16.068 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83
Saturday 02 November 2024  18:58:45 -0400 (0:00:00.047)       0:05:16.115 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}
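
Every MD RAID check above is skipped because `raid_level` is null for this Stratis pool; the reset task then nulls the regex facts regardless, so stale values cannot leak into later pool iterations. For an MD-backed pool the same subset matches `mdadm --detail` output; a sketch of the active-devices step (device path and register name are illustrative):

    - name: Get information about RAID
      ansible.builtin.command:
        cmd: mdadm --detail /dev/md/{{ storage_test_pool.name }}
      register: storage_test_mdadm_detail   # illustrative name
      changed_when: false

    - name: Check RAID active devices count
      ansible.builtin.assert:
        that:
          - >-
            storage_test_mdadm_detail.stdout is
            search('Active Devices : ' ~ storage_test_pool.raid_device_count)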

TASK [Check LVM RAID] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:86
Saturday 02 November 2024  18:58:45 -0400 (0:00:00.049)       0:05:16.165 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node2

TASK [Validate pool member LVM RAID settings] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Saturday 02 November 2024  18:58:45 -0400 (0:00:00.086)       0:05:16.252 ***** 
skipping: [managed-node2] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0', '_kernel_device': '/dev/dm-14', '_raw_kernel_device': '/dev/dm-14'})  => {
    "ansible_loop_var": "storage_test_lvmraid_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_lvmraid_volume": {
        "_device": "/dev/stratis/foo/test1",
        "_kernel_device": "/dev/dm-14",
        "_mount_id": "UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0",
        "_raw_device": "/dev/stratis/foo/test1",
        "_raw_kernel_device": "/dev/dm-14",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [
            "sda",
            "sdb",
            "sdc",
            "sdd",
            "sde",
            "sdf",
            "sdg",
            "sdh",
            "sdi"
        ],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "present",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check Thin Pools] ********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:89
Saturday 02 November 2024  18:58:45 -0400 (0:00:00.054)       0:05:16.306 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node2

TASK [Validate pool member thinpool settings] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2
Saturday 02 November 2024  18:58:45 -0400 (0:00:00.156)       0:05:16.462 ***** 
skipping: [managed-node2] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0', '_kernel_device': '/dev/dm-14', '_raw_kernel_device': '/dev/dm-14'})  => {
    "ansible_loop_var": "storage_test_thin_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_thin_volume": {
        "_device": "/dev/stratis/foo/test1",
        "_kernel_device": "/dev/dm-14",
        "_mount_id": "UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0",
        "_raw_device": "/dev/stratis/foo/test1",
        "_raw_kernel_device": "/dev/dm-14",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [
            "sda",
            "sdb",
            "sdc",
            "sdd",
            "sde",
            "sdf",
            "sdg",
            "sdh",
            "sdi"
        ],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "present",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check member encryption] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:92
Saturday 02 November 2024  18:58:45 -0400 (0:00:00.056)       0:05:16.519 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node2

TASK [Set test variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Saturday 02 November 2024  18:58:45 -0400 (0:00:00.090)       0:05:16.609 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "1",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Saturday 02 November 2024  18:58:45 -0400 (0:00:00.083)       0:05:16.693 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Saturday 02 November 2024  18:58:45 -0400 (0:00:00.038)       0:05:16.731 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Saturday 02 November 2024  18:58:45 -0400 (0:00:00.037)       0:05:16.769 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}
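
The expected crypttab entry count is set above, but both validation tasks skip because the member-level loops have nothing to iterate (Stratis members are not listed in /etc/crypttab). A sketch of the per-member check they would perform (variable names are illustrative):

    - name: Validate pool member crypttab entries
      ansible.builtin.assert:
        that:
          - >-
            storage_test_crypttab.stdout_lines
            | select('search', item)
            | list | length == _storage_test_expected_crypttab_entries | int
      loop: "{{ _storage_test_pool_members | default([]) }}"   # empty here, so nothing runs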

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:95
Saturday 02 November 2024  18:58:45 -0400 (0:00:00.049)       0:05:16.818 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node2

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Saturday 02 November 2024  18:58:46 -0400 (0:00:00.092)       0:05:16.911 ***** 
skipping: [managed-node2] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0', '_kernel_device': '/dev/dm-14', '_raw_kernel_device': '/dev/dm-14'})  => {
    "ansible_loop_var": "storage_test_vdo_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_vdo_volume": {
        "_device": "/dev/stratis/foo/test1",
        "_kernel_device": "/dev/dm-14",
        "_mount_id": "UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0",
        "_raw_device": "/dev/stratis/foo/test1",
        "_raw_kernel_device": "/dev/dm-14",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [
            "sda",
            "sdb",
            "sdc",
            "sdd",
            "sde",
            "sdf",
            "sdg",
            "sdh",
            "sdi"
        ],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "present",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:98
Saturday 02 November 2024  18:58:46 -0400 (0:00:00.055)       0:05:16.966 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node2

TASK [Run 'stratis report'] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Saturday 02 November 2024  18:58:46 -0400 (0:00:00.223)       0:05:17.190 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "stratis",
        "report"
    ],
    "delta": "0:00:00.337542",
    "end": "2024-11-02 18:58:46.965346",
    "rc": 0,
    "start": "2024-11-02 18:58:46.627804"
}

STDOUT:

{
    "name_to_pool_uuid_map": {},
    "partially_constructed_pools": [],
    "path_to_ids_map": {},
    "pools": [
        {
            "available_actions": "fully_operational",
            "blockdevs": {
                "cachedevs": [],
                "datadevs": [
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                        "in_use": true,
                        "key_description": "blivet-foo",
                        "path": "/dev/sda",
                        "size": "20938752 sectors",
                        "uuid": "74ecc886-2c82-4e77-abda-faebaa43c474"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                        "in_use": true,
                        "key_description": "blivet-foo",
                        "path": "/dev/sdb",
                        "size": "20938752 sectors",
                        "uuid": "19e44d27-b955-49c0-88a5-6aa7e375e9ad"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                        "in_use": true,
                        "key_description": "blivet-foo",
                        "path": "/dev/sdc",
                        "size": "20938752 sectors",
                        "uuid": "85599f9f-945f-4d34-ab2c-646d3a692085"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                        "in_use": true,
                        "key_description": "blivet-foo",
                        "path": "/dev/sdd",
                        "size": "2147450880 sectors",
                        "uuid": "720d2792-bc9c-426e-9edb-792054c8825f"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                        "in_use": false,
                        "key_description": "blivet-foo",
                        "path": "/dev/sde",
                        "size": "2147450880 sectors",
                        "uuid": "12c040f9-9ef5-48a6-87c0-626aa570ad34"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                        "in_use": false,
                        "key_description": "blivet-foo",
                        "path": "/dev/sdf",
                        "size": "20938752 sectors",
                        "uuid": "c87682af-f4f6-4f61-8ca2-579c183a9c17"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                        "in_use": false,
                        "key_description": "blivet-foo",
                        "path": "/dev/sdg",
                        "size": "2147450880 sectors",
                        "uuid": "38e8e1a2-4c83-43df-9695-accb23200b24"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                        "in_use": false,
                        "key_description": "blivet-foo",
                        "path": "/dev/sdh",
                        "size": "20938752 sectors",
                        "uuid": "daccb6fb-36d5-46a3-ba99-12622586c190"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                        "in_use": false,
                        "key_description": "blivet-foo",
                        "path": "/dev/sdi",
                        "size": "20938752 sectors",
                        "uuid": "2c7cd33c-4d04-4c9d-95ed-699efbde2b67"
                    }
                ]
            },
            "filesystems": [
                {
                    "name": "test1",
                    "origin": "Not set",
                    "size": "8388608 sectors",
                    "size_limit": "Not set",
                    "used": "72351744 bytes",
                    "uuid": "cc0ea903-7499-49e5-9db5-ea7601b8b1c0"
                }
            ],
            "fs_limit": 100,
            "name": "foo",
            "uuid": "af4d9e1f-2295-42be-8575-58097d9869c6"
        }
    ],
    "stopped_pools": []
}
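
The JSON above is the verbatim output of the stratis CLI. Judging by the registered result (and the later "Reset variable used by test" task that nulls storage_test_stratis_report), the step boils down to a plain command task; a minimal sketch, with changed_when inferred from the unchanged status in the result:

    - name: Run 'stratis report'
      ansible.builtin.command: stratis report
      register: storage_test_stratis_report
      changed_when: false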

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:11
Saturday 02 November 2024  18:58:47 -0400 (0:00:00.782)       0:05:17.973 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_stratis_pool_info": {
            "name_to_pool_uuid_map": {},
            "partially_constructed_pools": [],
            "path_to_ids_map": {},
            "pools": [
                {
                    "available_actions": "fully_operational",
                    "blockdevs": {
                        "cachedevs": [],
                        "datadevs": [
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                                "in_use": true,
                                "key_description": "blivet-foo",
                                "path": "/dev/sda",
                                "size": "20938752 sectors",
                                "uuid": "74ecc886-2c82-4e77-abda-faebaa43c474"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                                "in_use": true,
                                "key_description": "blivet-foo",
                                "path": "/dev/sdb",
                                "size": "20938752 sectors",
                                "uuid": "19e44d27-b955-49c0-88a5-6aa7e375e9ad"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                                "in_use": true,
                                "key_description": "blivet-foo",
                                "path": "/dev/sdc",
                                "size": "20938752 sectors",
                                "uuid": "85599f9f-945f-4d34-ab2c-646d3a692085"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                                "in_use": true,
                                "key_description": "blivet-foo",
                                "path": "/dev/sdd",
                                "size": "2147450880 sectors",
                                "uuid": "720d2792-bc9c-426e-9edb-792054c8825f"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                                "in_use": false,
                                "key_description": "blivet-foo",
                                "path": "/dev/sde",
                                "size": "2147450880 sectors",
                                "uuid": "12c040f9-9ef5-48a6-87c0-626aa570ad34"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                                "in_use": false,
                                "key_description": "blivet-foo",
                                "path": "/dev/sdf",
                                "size": "20938752 sectors",
                                "uuid": "c87682af-f4f6-4f61-8ca2-579c183a9c17"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                                "in_use": false,
                                "key_description": "blivet-foo",
                                "path": "/dev/sdg",
                                "size": "2147450880 sectors",
                                "uuid": "38e8e1a2-4c83-43df-9695-accb23200b24"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                                "in_use": false,
                                "key_description": "blivet-foo",
                                "path": "/dev/sdh",
                                "size": "20938752 sectors",
                                "uuid": "daccb6fb-36d5-46a3-ba99-12622586c190"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                                "in_use": false,
                                "key_description": "blivet-foo",
                                "path": "/dev/sdi",
                                "size": "20938752 sectors",
                                "uuid": "2c7cd33c-4d04-4c9d-95ed-699efbde2b67"
                            }
                        ]
                    },
                    "filesystems": [
                        {
                            "name": "test1",
                            "origin": "Not set",
                            "size": "8388608 sectors",
                            "size_limit": "Not set",
                            "used": "72351744 bytes",
                            "uuid": "cc0ea903-7499-49e5-9db5-ea7601b8b1c0"
                        }
                    ],
                    "fs_limit": 100,
                    "name": "foo",
                    "uuid": "af4d9e1f-2295-42be-8575-58097d9869c6"
                }
            ],
            "stopped_pools": []
        }
    },
    "changed": false
}
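
The _stratis_pool_info fact repeats the report verbatim, which is consistent with parsing the registered stdout. A hypothetical one-task reconstruction (the actual test source is not shown in this log):

    - name: Get information about Stratis
      ansible.builtin.set_fact:
        _stratis_pool_info: "{{ storage_test_stratis_report.stdout | from_json }}"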

TASK [Verify that the pool was created] ****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Saturday 02 November 2024  18:58:47 -0400 (0:00:00.120)       0:05:18.093 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed
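
A passing assertion here typically means exactly one pool with the expected name exists in the parsed report. A hedged sketch of such a check (the real condition may differ; storage_test_pool is the pool spec referenced elsewhere in this run):

    - name: Verify that the pool was created
      ansible.builtin.assert:
        that:
          - _stratis_pool_info.pools | selectattr('name', 'equalto', storage_test_pool.name) | list | length == 1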

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:25
Saturday 02 November 2024  18:58:47 -0400 (0:00:00.120)       0:05:18.214 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:34
Saturday 02 November 2024  18:58:47 -0400 (0:00:00.092)       0:05:18.306 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.encryption_clevis_pin == 'tang'",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:44
Saturday 02 November 2024  18:58:47 -0400 (0:00:00.083)       0:05:18.389 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:101
Saturday 02 November 2024  18:58:47 -0400 (0:00:00.082)       0:05:18.472 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Saturday 02 November 2024  18:58:47 -0400 (0:00:00.082)       0:05:18.554 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0', '_kernel_device': '/dev/dm-14', '_raw_kernel_device': '/dev/dm-14'})
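
The item above is the volume spec after the role has filled in every default. Stripped of those defaults, it is consistent with a user-level definition along these lines (a reconstruction from the values shown, not the actual test source):

    storage_pools:
      - name: foo
        type: stratis
        disks: [sda, sdb, sdc, sdd, sde, sdf, sdg, sdh, sdi]
        volumes:
          - name: test1
            size: 4g
            fs_type: xfs
            mount_point: /opt/test1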

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Saturday 02 November 2024  18:58:47 -0400 (0:00:00.136)       0:05:18.690 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_volume_present": true,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}
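
Each subset in _storage_volume_tests maps to a file named test-verify-volume-<subset>.yml, as the includes below confirm. A minimal sketch of the dispatching task (the loop_var name is inferred from the unrendered task title that follows):

    - name: Run test verify for {{ storage_test_volume_subset }}
      ansible.builtin.include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
      loop: "{{ _storage_volume_tests }}"
      loop_control:
        loop_var: storage_test_volume_subset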

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Saturday 02 November 2024  18:58:47 -0400 (0:00:00.123)       0:05:18.814 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node2 => (item=mount)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node2 => (item=fstab)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node2 => (item=fs)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node2 => (item=device)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node2 => (item=encryption)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node2 => (item=md)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node2 => (item=size)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node2 => (item=cache)

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Saturday 02 November 2024  18:58:48 -0400 (0:00:00.312)       0:05:19.126 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/stratis/foo/test1"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Saturday 02 November 2024  18:58:48 -0400 (0:00:00.051)       0:05:19.178 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "/opt/test1",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Saturday 02 November 2024  18:58:48 -0400 (0:00:00.073)       0:05:19.251 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Saturday 02 November 2024  18:58:48 -0400 (0:00:00.044)       0:05:19.296 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Saturday 02 November 2024  18:58:48 -0400 (0:00:00.071)       0:05:19.367 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Saturday 02 November 2024  18:58:48 -0400 (0:00:00.073)       0:05:19.440 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Saturday 02 November 2024  18:58:48 -0400 (0:00:00.074)       0:05:19.515 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode",
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Saturday 02 November 2024  18:58:48 -0400 (0:00:00.058)       0:05:19.573 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Saturday 02 November 2024  18:58:48 -0400 (0:00:00.053)       0:05:19.627 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Saturday 02 November 2024  18:58:48 -0400 (0:00:00.052)       0:05:19.679 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Saturday 02 November 2024  18:58:48 -0400 (0:00:00.046)       0:05:19.725 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Saturday 02 November 2024  18:58:48 -0400 (0:00:00.059)       0:05:19.785 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "1",
        "storage_test_fstab_expected_mount_options_matches": "1",
        "storage_test_fstab_expected_mount_point_matches": "1",
        "storage_test_fstab_id_matches": [
            "UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0 "
        ],
        "storage_test_fstab_mount_options_matches": [
            " /opt/test1 xfs defaults "
        ],
        "storage_test_fstab_mount_point_matches": [
            " /opt/test1 "
        ]
    },
    "changed": false
}
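
The three *_matches lists are consistent with pattern scans over the fstab contents held in storage_test_fstab (a fact nulled in the later "Clean up variable namespace" task). A hypothetical reconstruction of two of them using the stock regex_findall filter:

    - name: Set some variables for fstab checking
      ansible.builtin.set_fact:
        storage_test_fstab_id_matches: "{{ storage_test_fstab.stdout | regex_findall(storage_test_volume._mount_id + ' ') }}"
        storage_test_fstab_mount_point_matches: "{{ storage_test_fstab.stdout | regex_findall(' ' + storage_test_volume.mount_point + ' ') }}"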

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Saturday 02 November 2024  18:58:48 -0400 (0:00:00.087)       0:05:19.872 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Saturday 02 November 2024  18:58:49 -0400 (0:00:00.097)       0:05:19.970 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Saturday 02 November 2024  18:58:49 -0400 (0:00:00.261)       0:05:20.231 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__storage_verify_mount_options | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Saturday 02 November 2024  18:58:49 -0400 (0:00:00.081)       0:05:20.313 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:51
Saturday 02 November 2024  18:58:49 -0400 (0:00:00.056)       0:05:20.369 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Saturday 02 November 2024  18:58:49 -0400 (0:00:00.050)       0:05:20.420 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type != \"stratis\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Saturday 02 November 2024  18:58:49 -0400 (0:00:00.047)       0:05:20.467 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type != \"stratis\"",
    "skip_reason": "Conditional result was False"
}

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Saturday 02 November 2024  18:58:49 -0400 (0:00:00.058)       0:05:20.525 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730588282.2885692,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "ctime": 1730588282.2885692,
        "dev": 5,
        "device_type": 64782,
        "executable": false,
        "exists": true,
        "gid": 6,
        "gr_name": "disk",
        "inode": 6315,
        "isblk": true,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": false,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/symlink",
        "mode": "0660",
        "mtime": 1730588282.2885692,
        "nlink": 1,
        "path": "/dev/stratis/foo/test1",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": null,
        "wgrp": true,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
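
Note the combination of mimetype inode/symlink with isblk true: /dev/stratis/foo/test1 is a symlink to the kernel device (/dev/dm-14 per the volume spec above), and the stat dereferenced it. A minimal sketch of the task, with the register name and follow option assumed:

    - name: See whether the device node is present
      ansible.builtin.stat:
        path: "{{ storage_test_volume._device }}"
        follow: true
      register: storage_test_dev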

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Saturday 02 November 2024  18:58:50 -0400 (0:00:00.438)       0:05:20.964 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Saturday 02 November 2024  18:58:50 -0400 (0:00:00.091)       0:05:21.055 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Saturday 02 November 2024  18:58:50 -0400 (0:00:00.080)       0:05:21.136 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Saturday 02 November 2024  18:58:50 -0400 (0:00:00.092)       0:05:21.228 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "st_volume_type": "stratis"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Saturday 02 November 2024  18:58:50 -0400 (0:00:00.088)       0:05:21.317 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"raid\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Saturday 02 November 2024  18:58:50 -0400 (0:00:00.088)       0:05:21.405 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Saturday 02 November 2024  18:58:50 -0400 (0:00:00.090)       0:05:21.496 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Saturday 02 November 2024  18:58:50 -0400 (0:00:00.185)       0:05:21.681 ***** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: cryptsetup
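
"Nothing to do" with rc 0 means cryptsetup was already installed; the lsrpackages: line appears to come from the test harness's package-module wrapper rather than from stock Ansible. The plain equivalent of the check:

    - name: Ensure cryptsetup is present
      ansible.builtin.package:
        name: cryptsetup
        state: present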

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Saturday 02 November 2024  18:58:52 -0400 (0:00:01.358)       0:05:23.039 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Saturday 02 November 2024  18:58:52 -0400 (0:00:00.082)       0:05:23.122 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Saturday 02 November 2024  18:58:52 -0400 (0:00:00.087)       0:05:23.209 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Saturday 02 November 2024  18:58:52 -0400 (0:00:00.124)       0:05:23.333 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Saturday 02 November 2024  18:58:52 -0400 (0:00:00.078)       0:05:23.412 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Saturday 02 November 2024  18:58:52 -0400 (0:00:00.080)       0:05:23.493 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Saturday 02 November 2024  18:58:52 -0400 (0:00:00.082)       0:05:23.575 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Saturday 02 November 2024  18:58:52 -0400 (0:00:00.078)       0:05:23.654 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Saturday 02 November 2024  18:58:52 -0400 (0:00:00.086)       0:05:23.740 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Saturday 02 November 2024  18:58:52 -0400 (0:00:00.126)       0:05:23.867 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed
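
With zero expected and zero found crypttab entries, the assertion passes trivially. A hedged sketch of what such a check could look like, built from the facts set two tasks earlier:

    - name: Check for /etc/crypttab entry
      ansible.builtin.assert:
        that:
          - _storage_test_crypttab_entries | length == _storage_test_expected_crypttab_entries | int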

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Saturday 02 November 2024  18:58:53 -0400 (0:00:00.141)       0:05:24.008 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Saturday 02 November 2024  18:58:53 -0400 (0:00:00.108)       0:05:24.116 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Saturday 02 November 2024  18:58:53 -0400 (0:00:00.211)       0:05:24.328 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Saturday 02 November 2024  18:58:53 -0400 (0:00:00.114)       0:05:24.442 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Saturday 02 November 2024  18:58:53 -0400 (0:00:00.087)       0:05:24.530 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Saturday 02 November 2024  18:58:53 -0400 (0:00:00.083)       0:05:24.614 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Saturday 02 November 2024  18:58:53 -0400 (0:00:00.083)       0:05:24.698 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Saturday 02 November 2024  18:58:53 -0400 (0:00:00.084)       0:05:24.782 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Saturday 02 November 2024  18:58:53 -0400 (0:00:00.082)       0:05:24.865 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Saturday 02 November 2024  18:58:54 -0400 (0:00:00.079)       0:05:24.945 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Saturday 02 November 2024  18:58:54 -0400 (0:00:00.095)       0:05:25.040 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Saturday 02 November 2024  18:58:54 -0400 (0:00:00.083)       0:05:25.124 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Saturday 02 November 2024  18:58:54 -0400 (0:00:00.079)       0:05:25.204 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Saturday 02 November 2024  18:58:54 -0400 (0:00:00.074)       0:05:25.279 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Saturday 02 November 2024  18:58:54 -0400 (0:00:00.058)       0:05:25.337 ***** 
ok: [managed-node2] => {
    "bytes": 4294967296,
    "changed": false,
    "lvm": "4g",
    "parted": "4GiB",
    "size": "4 GiB"
}
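
The bytes/lvm/parted/size quartet is the output shape of the test suite's size-conversion helper. The core conversion can be approximated with the stock human_to_bytes filter (a stand-in, not the helper itself); note that storage_test_expected_size stays undefined in the later "Show expected size" task because every task that would set it is gated on storage_test_volume.type == "lvm" and is skipped for this Stratis volume:

    - name: Parse the actual size of the volume
      ansible.builtin.set_fact:
        storage_test_actual_size_bytes: "{{ storage_test_volume.size | human_to_bytes }}"  # '4g' -> 4294967296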

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Saturday 02 November 2024  18:58:54 -0400 (0:00:00.421)       0:05:25.759 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Saturday 02 November 2024  18:58:55 -0400 (0:00:00.258)       0:05:26.017 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Saturday 02 November 2024  18:58:55 -0400 (0:00:00.081)       0:05:26.099 ***** 
ok: [managed-node2] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Saturday 02 November 2024  18:58:55 -0400 (0:00:00.056)       0:05:26.155 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Saturday 02 November 2024  18:58:55 -0400 (0:00:00.097)       0:05:26.253 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Saturday 02 November 2024  18:58:55 -0400 (0:00:00.079)       0:05:26.333 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Saturday 02 November 2024  18:58:55 -0400 (0:00:00.116)       0:05:26.449 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Saturday 02 November 2024  18:58:55 -0400 (0:00:00.089)       0:05:26.539 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Saturday 02 November 2024  18:58:55 -0400 (0:00:00.072)       0:05:26.611 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Saturday 02 November 2024  18:58:55 -0400 (0:00:00.046)       0:05:26.658 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Saturday 02 November 2024  18:58:55 -0400 (0:00:00.048)       0:05:26.707 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Saturday 02 November 2024  18:58:55 -0400 (0:00:00.050)       0:05:26.758 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Saturday 02 November 2024  18:58:55 -0400 (0:00:00.079)       0:05:26.837 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Saturday 02 November 2024  18:58:56 -0400 (0:00:00.082)       0:05:26.920 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Saturday 02 November 2024  18:58:56 -0400 (0:00:00.328)       0:05:27.248 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Saturday 02 November 2024  18:58:56 -0400 (0:00:00.091)       0:05:27.340 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Saturday 02 November 2024  18:58:56 -0400 (0:00:00.081)       0:05:27.421 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Saturday 02 November 2024  18:58:56 -0400 (0:00:00.076)       0:05:27.498 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Saturday 02 November 2024  18:58:56 -0400 (0:00:00.109)       0:05:27.607 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Saturday 02 November 2024  18:58:56 -0400 (0:00:00.093)       0:05:27.701 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Saturday 02 November 2024  18:58:56 -0400 (0:00:00.100)       0:05:27.802 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Saturday 02 November 2024  18:58:56 -0400 (0:00:00.073)       0:05:27.875 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Saturday 02 November 2024  18:58:57 -0400 (0:00:00.063)       0:05:27.938 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Saturday 02 November 2024  18:58:57 -0400 (0:00:00.057)       0:05:27.996 ***** 
ok: [managed-node2] => {
    "storage_test_actual_size": {
        "bytes": 4294967296,
        "changed": false,
        "failed": false,
        "lvm": "4g",
        "parted": "4GiB",
        "size": "4 GiB"
    }
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Saturday 02 November 2024  18:58:57 -0400 (0:00:00.086)       0:05:28.083 ***** 
ok: [managed-node2] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Saturday 02 November 2024  18:58:57 -0400 (0:00:00.102)       0:05:28.185 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Saturday 02 November 2024  18:58:57 -0400 (0:00:00.113)       0:05:28.299 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Saturday 02 November 2024  18:58:57 -0400 (0:00:00.170)       0:05:28.469 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Saturday 02 November 2024  18:58:57 -0400 (0:00:00.083)       0:05:28.553 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Saturday 02 November 2024  18:58:57 -0400 (0:00:00.103)       0:05:28.657 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Saturday 02 November 2024  18:58:57 -0400 (0:00:00.076)       0:05:28.733 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Saturday 02 November 2024  18:58:57 -0400 (0:00:00.101)       0:05:28.834 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Saturday 02 November 2024  18:58:58 -0400 (0:00:00.084)       0:05:28.919 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Saturday 02 November 2024  18:58:58 -0400 (0:00:00.062)       0:05:28.981 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Saturday 02 November 2024  18:58:58 -0400 (0:00:00.061)       0:05:29.043 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Saturday 02 November 2024  18:58:58 -0400 (0:00:00.038)       0:05:29.081 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Clean up] ****************************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:172
Saturday 02 November 2024  18:58:58 -0400 (0:00:00.049)       0:05:29.131 ***** 
included: fedora.linux_system_roles.storage for managed-node2

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Saturday 02 November 2024  18:58:58 -0400 (0:00:00.111)       0:05:29.243 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Saturday 02 November 2024  18:58:58 -0400 (0:00:00.182)       0:05:29.425 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Saturday 02 November 2024  18:58:58 -0400 (0:00:00.119)       0:05:29.545 ***** 
skipping: [managed-node2] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node2] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}
ok: [managed-node2] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Saturday 02 November 2024  18:58:58 -0400 (0:00:00.141)       0:05:29.687 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Saturday 02 November 2024  18:58:58 -0400 (0:00:00.075)       0:05:29.762 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Saturday 02 November 2024  18:58:58 -0400 (0:00:00.059)       0:05:29.822 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Saturday 02 November 2024  18:58:58 -0400 (0:00:00.060)       0:05:29.883 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Saturday 02 November 2024  18:58:59 -0400 (0:00:00.053)       0:05:29.936 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Saturday 02 November 2024  18:58:59 -0400 (0:00:00.162)       0:05:30.099 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Saturday 02 November 2024  18:58:59 -0400 (0:00:00.081)       0:05:30.181 ***** 
ok: [managed-node2] => {
    "storage_pools": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "name": "foo",
            "state": "absent",
            "type": "stratis",
            "volumes": [
                {
                    "mount_point": "/opt/test1",
                    "name": "test1",
                    "size": "4g",
                    "state": "absent"
                }
            ]
        }
    ]
}

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Saturday 02 November 2024  18:58:59 -0400 (0:00:00.145)       0:05:30.326 ***** 
ok: [managed-node2] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Saturday 02 November 2024  18:58:59 -0400 (0:00:00.094)       0:05:30.421 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Saturday 02 November 2024  18:58:59 -0400 (0:00:00.065)       0:05:30.487 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Saturday 02 November 2024  18:58:59 -0400 (0:00:00.064)       0:05:30.552 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Saturday 02 November 2024  18:58:59 -0400 (0:00:00.125)       0:05:30.677 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Saturday 02 November 2024  18:58:59 -0400 (0:00:00.050)       0:05:30.727 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Saturday 02 November 2024  18:58:59 -0400 (0:00:00.085)       0:05:30.813 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Saturday 02 November 2024  18:58:59 -0400 (0:00:00.037)       0:05:30.850 ***** 
changed: [managed-node2] => {
    "actions": [
        {
            "action": "destroy format",
            "device": "/dev/stratis/foo/test1",
            "fs_type": "stratis xfs"
        },
        {
            "action": "destroy device",
            "device": "/dev/stratis/foo/test1",
            "fs_type": null
        },
        {
            "action": "destroy device",
            "device": "/dev/stratis/foo",
            "fs_type": null
        },
        {
            "action": "destroy format",
            "device": "/dev/sdh",
            "fs_type": "stratis"
        },
        {
            "action": "destroy format",
            "device": "/dev/sda",
            "fs_type": "stratis"
        },
        {
            "action": "destroy format",
            "device": "/dev/sdi",
            "fs_type": "stratis"
        },
        {
            "action": "destroy format",
            "device": "/dev/sdd",
            "fs_type": "stratis"
        },
        {
            "action": "destroy format",
            "device": "/dev/sdf",
            "fs_type": "stratis"
        },
        {
            "action": "destroy format",
            "device": "/dev/sdg",
            "fs_type": "stratis"
        },
        {
            "action": "destroy format",
            "device": "/dev/sde",
            "fs_type": "stratis"
        },
        {
            "action": "destroy format",
            "device": "/dev/sdb",
            "fs_type": "stratis"
        },
        {
            "action": "destroy format",
            "device": "/dev/sdc",
            "fs_type": "stratis"
        }
    ],
    "changed": true,
    "crypts": [],
    "leaves": [
        "/dev/sda",
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1"
    ],
    "mounts": [
        {
            "fstype": "xfs",
            "path": "/opt/test1",
            "src": "UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0",
            "state": "absent"
        }
    ],
    "packages": [
        "xfsprogs"
    ],
    "pools": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "encryption": true,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "absent",
            "type": "stratis",
            "volumes": [
                {
                    "_device": "/dev/stratis/foo/test1",
                    "_mount_id": "UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0",
                    "_raw_device": "/dev/stratis/foo/test1",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda",
                        "sdb",
                        "sdc",
                        "sdd",
                        "sde",
                        "sdf",
                        "sdg",
                        "sdh",
                        "sdi"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "absent",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "stratis",
                    "vdo_pool_size": null
                }
            ]
        }
    ],
    "volumes": []
}

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Saturday 02 November 2024  18:59:11 -0400 (0:00:11.220)       0:05:42.071 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Saturday 02 November 2024  18:59:11 -0400 (0:00:00.192)       0:05:42.263 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730588285.4116027,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "313e5700286f4a87ad8514864f4abd254352f126",
        "ctime": 1730588285.4076028,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 415236291,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1730588285.4076028,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1436,
        "uid": 0,
        "version": "3857707169",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Saturday 02 November 2024  18:59:11 -0400 (0:00:00.634)       0:05:42.898 ***** 
ok: [managed-node2] => {
    "backup": "",
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Saturday 02 November 2024  18:59:12 -0400 (0:00:00.571)       0:05:43.469 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Saturday 02 November 2024  18:59:12 -0400 (0:00:00.077)       0:05:43.547 ***** 
ok: [managed-node2] => {
    "blivet_output": {
        "actions": [
            {
                "action": "destroy format",
                "device": "/dev/stratis/foo/test1",
                "fs_type": "stratis xfs"
            },
            {
                "action": "destroy device",
                "device": "/dev/stratis/foo/test1",
                "fs_type": null
            },
            {
                "action": "destroy device",
                "device": "/dev/stratis/foo",
                "fs_type": null
            },
            {
                "action": "destroy format",
                "device": "/dev/sdh",
                "fs_type": "stratis"
            },
            {
                "action": "destroy format",
                "device": "/dev/sda",
                "fs_type": "stratis"
            },
            {
                "action": "destroy format",
                "device": "/dev/sdi",
                "fs_type": "stratis"
            },
            {
                "action": "destroy format",
                "device": "/dev/sdd",
                "fs_type": "stratis"
            },
            {
                "action": "destroy format",
                "device": "/dev/sdf",
                "fs_type": "stratis"
            },
            {
                "action": "destroy format",
                "device": "/dev/sdg",
                "fs_type": "stratis"
            },
            {
                "action": "destroy format",
                "device": "/dev/sde",
                "fs_type": "stratis"
            },
            {
                "action": "destroy format",
                "device": "/dev/sdb",
                "fs_type": "stratis"
            },
            {
                "action": "destroy format",
                "device": "/dev/sdc",
                "fs_type": "stratis"
            }
        ],
        "changed": true,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/sda",
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1"
        ],
        "mounts": [
            {
                "fstype": "xfs",
                "path": "/opt/test1",
                "src": "UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0",
                "state": "absent"
            }
        ],
        "packages": [
            "xfsprogs"
        ],
        "pools": [
            {
                "disks": [
                    "sda",
                    "sdb",
                    "sdc",
                    "sdd",
                    "sde",
                    "sdf",
                    "sdg",
                    "sdh",
                    "sdi"
                ],
                "encryption": true,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "absent",
                "type": "stratis",
                "volumes": [
                    {
                        "_device": "/dev/stratis/foo/test1",
                        "_mount_id": "UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0",
                        "_raw_device": "/dev/stratis/foo/test1",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda",
                            "sdb",
                            "sdc",
                            "sdd",
                            "sde",
                            "sdf",
                            "sdg",
                            "sdh",
                            "sdi"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "absent",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "stratis",
                        "vdo_pool_size": null
                    }
                ]
            }
        ],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Saturday 02 November 2024  18:59:12 -0400 (0:00:00.103)       0:05:43.650 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda",
                    "sdb",
                    "sdc",
                    "sdd",
                    "sde",
                    "sdf",
                    "sdg",
                    "sdh",
                    "sdi"
                ],
                "encryption": true,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "absent",
                "type": "stratis",
                "volumes": [
                    {
                        "_device": "/dev/stratis/foo/test1",
                        "_mount_id": "UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0",
                        "_raw_device": "/dev/stratis/foo/test1",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda",
                            "sdb",
                            "sdc",
                            "sdd",
                            "sde",
                            "sdf",
                            "sdg",
                            "sdh",
                            "sdi"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "absent",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "stratis",
                        "vdo_pool_size": null
                    }
                ]
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Saturday 02 November 2024  18:59:12 -0400 (0:00:00.101)       0:05:43.752 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Saturday 02 November 2024  18:59:12 -0400 (0:00:00.096)       0:05:43.849 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
changed: [managed-node2] => (item={'src': 'UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0', 'path': '/opt/test1', 'state': 'absent', 'fstype': 'xfs'}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "xfs",
    "mount_info": {
        "fstype": "xfs",
        "path": "/opt/test1",
        "src": "UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0",
        "state": "absent"
    },
    "name": "/opt/test1",
    "opts": "defaults",
    "passno": "0",
    "src": "UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Saturday 02 November 2024  18:59:13 -0400 (0:00:00.647)       0:05:44.496 ***** 
ok: [managed-node2] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Saturday 02 November 2024  18:59:14 -0400 (0:00:00.910)       0:05:45.407 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Saturday 02 November 2024  18:59:15 -0400 (0:00:00.547)       0:05:45.954 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Saturday 02 November 2024  18:59:15 -0400 (0:00:00.178)       0:05:46.132 ***** 
ok: [managed-node2] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Saturday 02 November 2024  18:59:16 -0400 (0:00:00.897)       0:05:47.030 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730587881.7952998,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1730587879.2202723,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 104857860,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1730587879.2212098,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "3381617552",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Saturday 02 November 2024  18:59:16 -0400 (0:00:00.482)       0:05:47.512 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Saturday 02 November 2024  18:59:16 -0400 (0:00:00.096)       0:05:47.608 ***** 
ok: [managed-node2]

TASK [Verify role results] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:187
Saturday 02 November 2024  18:59:17 -0400 (0:00:01.008)       0:05:48.616 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node2

TASK [Print out pool information] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Saturday 02 November 2024  18:59:17 -0400 (0:00:00.079)       0:05:48.696 ***** 
ok: [managed-node2] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "encryption": true,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "absent",
            "type": "stratis",
            "volumes": [
                {
                    "_device": "/dev/stratis/foo/test1",
                    "_mount_id": "UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0",
                    "_raw_device": "/dev/stratis/foo/test1",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda",
                        "sdb",
                        "sdc",
                        "sdd",
                        "sde",
                        "sdf",
                        "sdg",
                        "sdh",
                        "sdi"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "absent",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "stratis",
                    "vdo_pool_size": null
                }
            ]
        }
    ]
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Saturday 02 November 2024  18:59:17 -0400 (0:00:00.067)       0:05:48.763 ***** 
skipping: [managed-node2] => {
    "false_condition": "_storage_volumes_list | length > 0"
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Saturday 02 November 2024  18:59:17 -0400 (0:00:00.059)       0:05:48.822 ***** 
ok: [managed-node2] => {
    "changed": false,
    "info": {
        "/dev/sda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdb": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda1",
            "size": "250G",
            "type": "partition",
            "uuid": "d6f52a16-d77d-4c0d-9841-76b856654401"
        }
    }
}

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Saturday 02 November 2024  18:59:18 -0400 (0:00:00.369)       0:05:49.192 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:01.004025",
    "end": "2024-11-02 18:59:19.598942",
    "rc": 0,
    "start": "2024-11-02 18:59:18.594917"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Wed Oct 30 09:04:39 2024
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=d6f52a16-d77d-4c0d-9841-76b856654401 /                       xfs     defaults        0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Saturday 02 November 2024  18:59:19 -0400 (0:00:01.470)       0:05:50.663 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:01.004303",
    "end": "2024-11-02 18:59:21.221541",
    "failed_when_result": false,
    "rc": 0,
    "start": "2024-11-02 18:59:20.217238"
}

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Saturday 02 November 2024  18:59:21 -0400 (0:00:01.735)       0:05:52.398 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node2 => (item={'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'encryption': True, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'absent', 'type': 'stratis', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0'}]})

TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Saturday 02 November 2024  18:59:21 -0400 (0:00:00.167)       0:05:52.566 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Saturday 02 November 2024  18:59:21 -0400 (0:00:00.062)       0:05:52.628 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Saturday 02 November 2024  18:59:21 -0400 (0:00:00.074)       0:05:52.703 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify pool subset] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34
Saturday 02 November 2024  18:59:21 -0400 (0:00:00.081)       0:05:52.784 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node2 => (item=members)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node2 => (item=volumes)

TASK [Set test variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2
Saturday 02 November 2024  18:59:22 -0400 (0:00:00.174)       0:05:52.959 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Get the canonical device path for each member device] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8
Saturday 02 November 2024  18:59:22 -0400 (0:00:00.079)       0:05:53.039 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Set pvs lvm length] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17
Saturday 02 November 2024  18:59:22 -0400 (0:00:00.065)       0:05:53.105 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set pool pvs] ************************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22
Saturday 02 November 2024  18:59:22 -0400 (0:00:00.098)       0:05:53.203 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify PV count] *********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27
Saturday 02 November 2024  18:59:22 -0400 (0:00:00.085)       0:05:53.288 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36
Saturday 02 November 2024  18:59:22 -0400 (0:00:00.074)       0:05:53.363 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41
Saturday 02 November 2024  18:59:22 -0400 (0:00:00.124)       0:05:53.487 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and not storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46
Saturday 02 November 2024  18:59:22 -0400 (0:00:00.153)       0:05:53.641 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level",
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:51
Saturday 02 November 2024  18:59:23 -0400 (0:00:00.273)       0:05:53.915 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:64
Saturday 02 November 2024  18:59:23 -0400 (0:00:00.063)       0:05:53.979 ***** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0
}

STDOUT:


** (process:201673): WARNING **: 18:59:23.457: failed to load module nvme: libbd_nvme.so.2: cannot open shared object file: No such file or directory
True



STDERR:

OpenSSH_8.7p1, OpenSSL 3.2.2 4 Jun 2024
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.42.118 originally 10.31.42.118
debug2: match not found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: configuration requests final Match pass
debug2: resolve_canonicalize: hostname 10.31.42.118 is address
debug1: re-parsing configuration
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.42.118 originally 10.31.42.118
debug2: match found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: auto-mux: Trying existing master
debug2: fd 3 setting O_NONBLOCK
debug2: mux_client_hello_exchange: master version 4
debug1: mux_client_request_session: master session id: 2
debug2: Received exit status from master 0
Shared connection to 10.31.42.118 closed.


TASK [Verify that PVs fill the whole devices when they should] *****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:73
Saturday 02 November 2024  18:59:23 -0400 (0:00:00.588)       0:05:54.568 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:83
Saturday 02 November 2024  18:59:23 -0400 (0:00:00.178)       0:05:54.747 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node2

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Saturday 02 November 2024  18:59:24 -0400 (0:00:00.299)       0:05:55.046 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Saturday 02 November 2024  18:59:24 -0400 (0:00:00.119)       0:05:55.166 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Saturday 02 November 2024  18:59:24 -0400 (0:00:00.133)       0:05:55.300 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Saturday 02 November 2024  18:59:24 -0400 (0:00:00.132)       0:05:55.432 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Saturday 02 November 2024  18:59:24 -0400 (0:00:00.122)       0:05:55.554 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Saturday 02 November 2024  18:59:24 -0400 (0:00:00.107)       0:05:55.662 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Saturday 02 November 2024  18:59:24 -0400 (0:00:00.085)       0:05:55.748 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55
Saturday 02 November 2024  18:59:24 -0400 (0:00:00.077)       0:05:55.825 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64
Saturday 02 November 2024  18:59:24 -0400 (0:00:00.058)       0:05:55.884 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74
Saturday 02 November 2024  18:59:25 -0400 (0:00:00.060)       0:05:55.944 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}
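
Every task in verify-pool-md.yml above is gated on the same condition, so for a Stratis pool with no RAID level the whole file is a no-op. A minimal sketch of that guard; only the when: condition is taken from the output above, and the command body is a hypothetical probe:

- name: Get information about RAID
  ansible.builtin.command: cat /proc/mdstat   # hypothetical; the real task body may differ
  register: storage_test_mdstat
  changed_when: false
  when: storage_test_pool.raid_level != none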

TASK [Reset variables used by tests] *******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83
Saturday 02 November 2024  18:59:25 -0400 (0:00:00.182)       0:05:56.126 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}

TASK [Check LVM RAID] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:86
Saturday 02 November 2024  18:59:25 -0400 (0:00:00.084)       0:05:56.211 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node2

TASK [Validate pool member LVM RAID settings] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Saturday 02 November 2024  18:59:25 -0400 (0:00:00.199)       0:05:56.411 ***** 
skipping: [managed-node2] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0'})  => {
    "ansible_loop_var": "storage_test_lvmraid_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_lvmraid_volume": {
        "_device": "/dev/stratis/foo/test1",
        "_mount_id": "UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0",
        "_raw_device": "/dev/stratis/foo/test1",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [
            "sda",
            "sdb",
            "sdc",
            "sdd",
            "sde",
            "sdf",
            "sdg",
            "sdh",
            "sdi"
        ],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "absent",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped
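
The per-volume LVM RAID check iterates over the pool's volumes with a named loop variable and a pool-type guard; with type 'stratis' every item is skipped, which is what produces the "All items skipped" summary. A sketch under those assumptions; only the loop variable name and the condition are confirmed by the output above, and the included file name is hypothetical:

- name: Validate pool member LVM RAID settings
  ansible.builtin.include_tasks: verify-pool-member-lvmraid.yml   # hypothetical file name
  loop: "{{ storage_test_pool.volumes }}"
  loop_control:
    loop_var: storage_test_lvmraid_volume
  when: storage_test_pool.type == 'lvm'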

TASK [Check Thin Pools] ********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:89
Saturday 02 November 2024  18:59:25 -0400 (0:00:00.099)       0:05:56.510 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node2

TASK [Validate pool member thinpool settings] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2
Saturday 02 November 2024  18:59:25 -0400 (0:00:00.233)       0:05:56.744 ***** 
skipping: [managed-node2] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0'})  => {
    "ansible_loop_var": "storage_test_thin_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_thin_volume": {
        "_device": "/dev/stratis/foo/test1",
        "_mount_id": "UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0",
        "_raw_device": "/dev/stratis/foo/test1",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [
            "sda",
            "sdb",
            "sdc",
            "sdd",
            "sde",
            "sdf",
            "sdg",
            "sdh",
            "sdi"
        ],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "absent",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check member encryption] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:92
Saturday 02 November 2024  18:59:25 -0400 (0:00:00.107)       0:05:56.851 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node2

TASK [Set test variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Saturday 02 November 2024  18:59:26 -0400 (0:00:00.186)       0:05:57.038 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Saturday 02 November 2024  18:59:26 -0400 (0:00:00.237)       0:05:57.275 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Saturday 02 November 2024  18:59:26 -0400 (0:00:00.121)       0:05:57.397 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Saturday 02 November 2024  18:59:26 -0400 (0:00:00.102)       0:05:57.499 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:95
Saturday 02 November 2024  18:59:26 -0400 (0:00:00.141)       0:05:57.641 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node2

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Saturday 02 November 2024  18:59:27 -0400 (0:00:00.515)       0:05:58.157 ***** 
skipping: [managed-node2] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0'})  => {
    "ansible_loop_var": "storage_test_vdo_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_vdo_volume": {
        "_device": "/dev/stratis/foo/test1",
        "_mount_id": "UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0",
        "_raw_device": "/dev/stratis/foo/test1",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [
            "sda",
            "sdb",
            "sdc",
            "sdd",
            "sde",
            "sdf",
            "sdg",
            "sdh",
            "sdi"
        ],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "absent",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:98
Saturday 02 November 2024  18:59:27 -0400 (0:00:00.166)       0:05:58.324 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node2

TASK [Run 'stratis report'] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Saturday 02 November 2024  18:59:27 -0400 (0:00:00.287)       0:05:58.611 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "stratis",
        "report"
    ],
    "delta": "0:00:00.337481",
    "end": "2024-11-02 18:59:28.446443",
    "rc": 0,
    "start": "2024-11-02 18:59:28.108962"
}

STDOUT:

{
    "name_to_pool_uuid_map": {},
    "partially_constructed_pools": [],
    "path_to_ids_map": {},
    "pools": [],
    "stopped_pools": []
}

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:11
Saturday 02 November 2024  18:59:28 -0400 (0:00:00.877)       0:05:59.489 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_stratis_pool_info": {
            "name_to_pool_uuid_map": {},
            "partially_constructed_pools": [],
            "path_to_ids_map": {},
            "pools": [],
            "stopped_pools": []
        }
    },
    "changed": false
}
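
The fact above is derived from the stdout of the preceding 'stratis report' command; because the pool was removed earlier in the test, every map and list in the report is empty. A sketch of the conversion, assuming the command result was registered as storage_test_stratis_report (the name the later "Reset variable used by test" task nulls out):

- name: Get information about Stratis
  ansible.builtin.set_fact:
    _stratis_pool_info: "{{ storage_test_stratis_report.stdout | from_json }}"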

TASK [Verify that the pool was created] ****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Saturday 02 November 2024  18:59:28 -0400 (0:00:00.192)       0:05:59.681 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:25
Saturday 02 November 2024  18:59:28 -0400 (0:00:00.087)       0:05:59.769 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:34
Saturday 02 November 2024  18:59:28 -0400 (0:00:00.124)       0:05:59.893 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:44
Saturday 02 November 2024  18:59:29 -0400 (0:00:00.079)       0:05:59.972 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:101
Saturday 02 November 2024  18:59:29 -0400 (0:00:00.079)       0:06:00.052 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Saturday 02 November 2024  18:59:29 -0400 (0:00:00.111)       0:06:00.163 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=cc0ea903-7499-49e5-9db5-ea7601b8b1c0'})

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Saturday 02 November 2024  18:59:29 -0400 (0:00:00.190)       0:06:00.354 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_volume_present": false,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Saturday 02 November 2024  18:59:29 -0400 (0:00:00.131)       0:06:00.486 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node2 => (item=mount)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node2 => (item=fstab)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node2 => (item=fs)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node2 => (item=device)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node2 => (item=encryption)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node2 => (item=md)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node2 => (item=size)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node2 => (item=cache)
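
Each entry in _storage_volume_tests selects one task file, so the eight includes above follow mechanically from the list set two tasks earlier. A sketch of the dispatch, with the file-name template inferred from the included paths:

- name: Run test verify for {{ storage_test_volume_subset }}
  ansible.builtin.include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
  loop: "{{ _storage_volume_tests }}"
  loop_control:
    loop_var: storage_test_volume_subset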

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Saturday 02 November 2024  18:59:30 -0400 (0:00:00.492)       0:06:00.978 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/stratis/foo/test1"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Saturday 02 November 2024  18:59:30 -0400 (0:00:00.060)       0:06:01.038 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "/opt/test1",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Saturday 02 November 2024  18:59:30 -0400 (0:00:00.076)       0:06:01.114 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Saturday 02 November 2024  18:59:30 -0400 (0:00:00.046)       0:06:01.161 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Saturday 02 November 2024  18:59:30 -0400 (0:00:00.045)       0:06:01.206 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Saturday 02 November 2024  18:59:30 -0400 (0:00:00.045)       0:06:01.251 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Saturday 02 November 2024  18:59:30 -0400 (0:00:00.045)       0:06:01.297 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode",
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Saturday 02 November 2024  18:59:30 -0400 (0:00:00.045)       0:06:01.343 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Saturday 02 November 2024  18:59:30 -0400 (0:00:00.044)       0:06:01.387 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Saturday 02 November 2024  18:59:30 -0400 (0:00:00.043)       0:06:01.431 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Saturday 02 November 2024  18:59:30 -0400 (0:00:00.045)       0:06:01.477 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Saturday 02 November 2024  18:59:30 -0400 (0:00:00.048)       0:06:01.525 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "0",
        "storage_test_fstab_expected_mount_options_matches": "0",
        "storage_test_fstab_expected_mount_point_matches": "0",
        "storage_test_fstab_id_matches": [],
        "storage_test_fstab_mount_options_matches": [],
        "storage_test_fstab_mount_point_matches": []
    },
    "changed": false
}

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Saturday 02 November 2024  18:59:30 -0400 (0:00:00.154)       0:06:01.679 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Saturday 02 November 2024  18:59:30 -0400 (0:00:00.062)       0:06:01.741 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed
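
With the volume absent, the expected match count is "0" and the assertion passes trivially. A sketch of the check, assuming a plain assert over the variables set in "Set some variables for fstab checking":

- name: Verify the fstab mount point
  ansible.builtin.assert:
    that:
      - storage_test_fstab_mount_point_matches | length ==
        storage_test_fstab_expected_mount_point_matches | int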

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Saturday 02 November 2024  18:59:30 -0400 (0:00:00.075)       0:06:01.817 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__storage_verify_mount_options | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Saturday 02 November 2024  18:59:30 -0400 (0:00:00.066)       0:06:01.883 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:51
Saturday 02 November 2024  18:59:31 -0400 (0:00:00.059)       0:06:01.942 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Saturday 02 November 2024  18:59:31 -0400 (0:00:00.055)       0:06:01.997 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type != \"stratis\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Saturday 02 November 2024  18:59:31 -0400 (0:00:00.045)       0:06:02.043 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type != \"stratis\"",
    "skip_reason": "Conditional result was False"
}

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Saturday 02 November 2024  18:59:31 -0400 (0:00:00.060)       0:06:02.104 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}
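
The stat result confirms that removing the pool also removed the volume's device node. A sketch of the probe, assuming it stats the volume's _device path (/dev/stratis/foo/test1 in this run):

- name: See whether the device node is present
  ansible.builtin.stat:
    path: "{{ storage_test_volume._device }}"
  register: storage_test_dev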

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Saturday 02 November 2024  18:59:31 -0400 (0:00:00.427)       0:06:02.532 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present or storage_test_volume.type == 'disk'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Saturday 02 November 2024  18:59:31 -0400 (0:00:00.083)       0:06:02.615 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Saturday 02 November 2024  18:59:31 -0400 (0:00:00.087)       0:06:02.702 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Saturday 02 November 2024  18:59:31 -0400 (0:00:00.078)       0:06:02.780 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "st_volume_type": "stratis"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Saturday 02 November 2024  18:59:31 -0400 (0:00:00.082)       0:06:02.863 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"raid\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Saturday 02 November 2024  18:59:32 -0400 (0:00:00.106)       0:06:02.970 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Saturday 02 November 2024  18:59:32 -0400 (0:00:00.152)       0:06:03.123 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Saturday 02 November 2024  18:59:32 -0400 (0:00:00.240)       0:06:03.363 ***** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: cryptsetup
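
"Nothing to do" with rc 0 means the package was already installed; the lsrpackages line lists the packages the task ensured. A sketch of the equivalent idempotent install:

- name: Ensure cryptsetup is present
  ansible.builtin.package:
    name: cryptsetup
    state: present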

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Saturday 02 November 2024  18:59:33 -0400 (0:00:01.333)       0:06:04.697 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Saturday 02 November 2024  18:59:33 -0400 (0:00:00.075)       0:06:04.772 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Saturday 02 November 2024  18:59:33 -0400 (0:00:00.082)       0:06:04.855 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Saturday 02 November 2024  18:59:34 -0400 (0:00:00.088)       0:06:04.944 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Saturday 02 November 2024  18:59:34 -0400 (0:00:00.091)       0:06:05.035 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Saturday 02 November 2024  18:59:34 -0400 (0:00:00.093)       0:06:05.129 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Saturday 02 November 2024  18:59:34 -0400 (0:00:00.075)       0:06:05.204 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Saturday 02 November 2024  18:59:34 -0400 (0:00:00.052)       0:06:05.256 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Saturday 02 November 2024  18:59:34 -0400 (0:00:00.044)       0:06:05.301 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Saturday 02 November 2024  18:59:34 -0400 (0:00:00.076)       0:06:05.377 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Saturday 02 November 2024  18:59:34 -0400 (0:00:00.095)       0:06:05.472 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Saturday 02 November 2024  18:59:34 -0400 (0:00:00.107)       0:06:05.580 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Saturday 02 November 2024  18:59:34 -0400 (0:00:00.106)       0:06:05.686 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Saturday 02 November 2024  18:59:34 -0400 (0:00:00.176)       0:06:05.863 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Saturday 02 November 2024  18:59:35 -0400 (0:00:00.232)       0:06:06.096 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Saturday 02 November 2024  18:59:35 -0400 (0:00:00.096)       0:06:06.192 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Saturday 02 November 2024  18:59:35 -0400 (0:00:00.096)       0:06:06.288 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Saturday 02 November 2024  18:59:35 -0400 (0:00:00.078)       0:06:06.367 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Saturday 02 November 2024  18:59:35 -0400 (0:00:00.093)       0:06:06.460 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Saturday 02 November 2024  18:59:35 -0400 (0:00:00.083)       0:06:06.544 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Saturday 02 November 2024  18:59:35 -0400 (0:00:00.078)       0:06:06.623 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Saturday 02 November 2024  18:59:35 -0400 (0:00:00.075)       0:06:06.698 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Saturday 02 November 2024  18:59:35 -0400 (0:00:00.075)       0:06:06.773 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Saturday 02 November 2024  18:59:35 -0400 (0:00:00.073)       0:06:06.847 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Saturday 02 November 2024  18:59:36 -0400 (0:00:00.080)       0:06:06.927 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Saturday 02 November 2024  18:59:36 -0400 (0:00:00.122)       0:06:07.050 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Saturday 02 November 2024  18:59:36 -0400 (0:00:00.126)       0:06:07.176 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Saturday 02 November 2024  18:59:36 -0400 (0:00:00.151)       0:06:07.327 ***** 
ok: [managed-node2] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}
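
The "VARIABLE IS NOT DEFINED!" text is informational, not a failure: debug prints that message for an undefined var: target instead of raising an error, and storage_test_expected_size is only set on the _storage_test_volume_present branch that was skipped above. A sketch, assuming a plain debug of the variable:

- name: Show expected size
  ansible.builtin.debug:
    var: storage_test_expected_size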

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Saturday 02 November 2024  18:59:36 -0400 (0:00:00.086)       0:06:07.414 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Saturday 02 November 2024  18:59:36 -0400 (0:00:00.219)       0:06:07.633 ***** 
skipping: [managed-node2] => {
    "false_condition": "_storage_test_volume_present | bool"
}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Saturday 02 November 2024  18:59:36 -0400 (0:00:00.123)       0:06:07.757 ***** 
skipping: [managed-node2] => {
    "false_condition": "_storage_test_volume_present | bool"
}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Saturday 02 November 2024  18:59:36 -0400 (0:00:00.105)       0:06:07.863 ***** 
skipping: [managed-node2] => {
    "false_condition": "_storage_test_volume_present | bool"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Saturday 02 November 2024  18:59:37 -0400 (0:00:00.089)       0:06:07.952 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Saturday 02 November 2024  18:59:37 -0400 (0:00:00.069)       0:06:08.022 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Saturday 02 November 2024  18:59:37 -0400 (0:00:00.051)       0:06:08.074 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Saturday 02 November 2024  18:59:37 -0400 (0:00:00.065)       0:06:08.139 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Saturday 02 November 2024  18:59:37 -0400 (0:00:00.074)       0:06:08.213 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Saturday 02 November 2024  18:59:37 -0400 (0:00:00.074)       0:06:08.287 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Saturday 02 November 2024  18:59:37 -0400 (0:00:00.082)       0:06:08.370 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Saturday 02 November 2024  18:59:37 -0400 (0:00:00.075)       0:06:08.446 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Saturday 02 November 2024  18:59:37 -0400 (0:00:00.078)       0:06:08.524 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Saturday 02 November 2024  18:59:37 -0400 (0:00:00.077)       0:06:08.601 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Saturday 02 November 2024  18:59:37 -0400 (0:00:00.087)       0:06:08.689 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Saturday 02 November 2024  18:59:37 -0400 (0:00:00.127)       0:06:08.817 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Saturday 02 November 2024  18:59:38 -0400 (0:00:00.351)       0:06:09.168 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Saturday 02 November 2024  18:59:38 -0400 (0:00:00.074)       0:06:09.242 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Saturday 02 November 2024  18:59:38 -0400 (0:00:00.099)       0:06:09.342 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Saturday 02 November 2024  18:59:38 -0400 (0:00:00.116)       0:06:09.458 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Saturday 02 November 2024  18:59:38 -0400 (0:00:00.121)       0:06:09.579 ***** 
ok: [managed-node2] => {
    "storage_test_actual_size": {
        "changed": false,
        "false_condition": "_storage_test_volume_present | bool",
        "skip_reason": "Conditional result was False",
        "skipped": true
    }
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Saturday 02 November 2024  18:59:38 -0400 (0:00:00.129)       0:06:09.709 ***** 
ok: [managed-node2] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Saturday 02 November 2024  18:59:38 -0400 (0:00:00.096)       0:06:09.806 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Saturday 02 November 2024  18:59:39 -0400 (0:00:00.146)       0:06:09.953 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Saturday 02 November 2024  18:59:39 -0400 (0:00:00.078)       0:06:10.031 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Saturday 02 November 2024  18:59:39 -0400 (0:00:00.080)       0:06:10.111 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Saturday 02 November 2024  18:59:39 -0400 (0:00:00.077)       0:06:10.189 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Saturday 02 November 2024  18:59:39 -0400 (0:00:00.129)       0:06:10.318 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Saturday 02 November 2024  18:59:39 -0400 (0:00:00.103)       0:06:10.421 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Saturday 02 November 2024  18:59:39 -0400 (0:00:00.117)       0:06:10.539 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Saturday 02 November 2024  18:59:39 -0400 (0:00:00.072)       0:06:10.612 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Saturday 02 November 2024  18:59:39 -0400 (0:00:00.251)       0:06:10.863 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Saturday 02 November 2024  18:59:40 -0400 (0:00:00.074)       0:06:10.938 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Create one Stratis pool on one disk] *************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:190
Saturday 02 November 2024  18:59:40 -0400 (0:00:00.105)       0:06:11.043 ***** 
included: fedora.linux_system_roles.storage for managed-node2

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Saturday 02 November 2024  18:59:40 -0400 (0:00:00.201)       0:06:11.244 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Saturday 02 November 2024  18:59:40 -0400 (0:00:00.170)       0:06:11.415 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Saturday 02 November 2024  18:59:40 -0400 (0:00:00.156)       0:06:11.571 ***** 
skipping: [managed-node2] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node2] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Saturday 02 November 2024  18:59:40 -0400 (0:00:00.197)       0:06:11.768 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Saturday 02 November 2024  18:59:40 -0400 (0:00:00.112)       0:06:11.881 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Saturday 02 November 2024  18:59:41 -0400 (0:00:00.091)       0:06:11.972 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Saturday 02 November 2024  18:59:41 -0400 (0:00:00.129)       0:06:12.102 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Saturday 02 November 2024  18:59:41 -0400 (0:00:00.099)       0:06:12.202 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Saturday 02 November 2024  18:59:41 -0400 (0:00:00.495)       0:06:12.697 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Saturday 02 November 2024  18:59:41 -0400 (0:00:00.101)       0:06:12.798 ***** 
ok: [managed-node2] => {
    "storage_pools": [
        {
            "disks": "sda",
            "name": "foo",
            "type": "stratis"
        }
    ]
}

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Saturday 02 November 2024  18:59:41 -0400 (0:00:00.095)       0:06:12.894 ***** 
ok: [managed-node2] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}
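
For reference, a minimal play that would drive the role into the state shown above, reconstructed from the storage_pools value in this log (the actual task at tests_stratis.yml:190 may differ; storage_volumes is deliberately left undefined, which is why the debug above reports it as such):

- name: Create one Stratis pool on one disk
  ansible.builtin.include_role:
    name: fedora.linux_system_roles.storage
  vars:
    storage_pools:
      - name: foo
        type: stratis
        disks: "sda"  # a single disk as a string; the role also accepts a list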

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Saturday 02 November 2024  18:59:42 -0400 (0:00:00.090)       0:06:12.985 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Saturday 02 November 2024  18:59:42 -0400 (0:00:00.083)       0:06:13.069 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Saturday 02 November 2024  18:59:42 -0400 (0:00:00.081)       0:06:13.150 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Saturday 02 November 2024  18:59:42 -0400 (0:00:00.081)       0:06:13.232 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Saturday 02 November 2024  18:59:42 -0400 (0:00:00.072)       0:06:13.304 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Saturday 02 November 2024  18:59:42 -0400 (0:00:00.099)       0:06:13.404 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Saturday 02 November 2024  18:59:42 -0400 (0:00:00.040)       0:06:13.445 ***** 
changed: [managed-node2] => {
    "actions": [
        {
            "action": "create format",
            "device": "/dev/sda",
            "fs_type": "stratis"
        },
        {
            "action": "create device",
            "device": "/dev/stratis/foo",
            "fs_type": null
        }
    ],
    "changed": true,
    "crypts": [],
    "leaves": [
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1",
        "/dev/stratis/foo"
    ],
    "mounts": [],
    "packages": [
        "stratis-cli",
        "stratisd",
        "xfsprogs"
    ],
    "pools": [
        {
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "stratis",
            "volumes": []
        }
    ],
    "volumes": []
}
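
The two recorded actions format /dev/sda as a Stratis data device and create the pool device reported as /dev/stratis/foo. The role drives this through blivet rather than the CLI, but a roughly equivalent manual step would be (a sketch only, not what the role runs):

- name: Create the same pool by hand with stratis-cli (sketch)
  ansible.builtin.command:
    cmd: stratis pool create foo /dev/sda
  # stratisd formats the backing disk and activates the pool, matching the
  # 'create format' and 'create device' actions reported above.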

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Saturday 02 November 2024  18:59:45 -0400 (0:00:02.525)       0:06:15.970 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Saturday 02 November 2024  18:59:45 -0400 (0:00:00.140)       0:06:16.110 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730588354.2243395,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "2b06b132c9b1f74ec4dca585656a9f294c78ba1c",
        "ctime": 1730588353.442331,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 415236291,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1730588353.442331,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1366,
        "uid": 0,
        "version": "3857707169",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Saturday 02 November 2024  18:59:45 -0400 (0:00:00.520)       0:06:16.631 ***** 
ok: [managed-node2] => {
    "backup": "",
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Saturday 02 November 2024  18:59:46 -0400 (0:00:00.495)       0:06:17.126 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Saturday 02 November 2024  18:59:46 -0400 (0:00:00.060)       0:06:17.187 ***** 
ok: [managed-node2] => {
    "blivet_output": {
        "actions": [
            {
                "action": "create format",
                "device": "/dev/sda",
                "fs_type": "stratis"
            },
            {
                "action": "create device",
                "device": "/dev/stratis/foo",
                "fs_type": null
            }
        ],
        "changed": true,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1",
            "/dev/stratis/foo"
        ],
        "mounts": [],
        "packages": [
            "stratis-cli",
            "stratisd",
            "xfsprogs"
        ],
        "pools": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "stratis",
                "volumes": []
            }
        ],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Saturday 02 November 2024  18:59:46 -0400 (0:00:00.302)       0:06:17.490 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "stratis",
                "volumes": []
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Saturday 02 November 2024  18:59:46 -0400 (0:00:00.083)       0:06:17.573 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Saturday 02 November 2024  18:59:46 -0400 (0:00:00.096)       0:06:17.670 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Saturday 02 November 2024  18:59:46 -0400 (0:00:00.118)       0:06:17.788 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "blivet_output['mounts']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Saturday 02 November 2024  18:59:46 -0400 (0:00:00.080)       0:06:17.869 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Saturday 02 November 2024  18:59:47 -0400 (0:00:00.144)       0:06:18.013 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Saturday 02 November 2024  18:59:47 -0400 (0:00:00.134)       0:06:18.147 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "blivet_output['mounts']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Saturday 02 November 2024  18:59:47 -0400 (0:00:00.119)       0:06:18.267 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730587881.7952998,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1730587879.2202723,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 104857860,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1730587879.2212098,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "3381617552",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Saturday 02 November 2024  18:59:47 -0400 (0:00:00.563)       0:06:18.830 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Saturday 02 November 2024  18:59:48 -0400 (0:00:00.081)       0:06:18.912 ***** 
ok: [managed-node2]

TASK [Verify role results] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:199
Saturday 02 November 2024  18:59:49 -0400 (0:00:01.196)       0:06:20.109 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node2

TASK [Print out pool information] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Saturday 02 November 2024  18:59:49 -0400 (0:00:00.242)       0:06:20.352 ***** 
ok: [managed-node2] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "stratis",
            "volumes": []
        }
    ]
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Saturday 02 November 2024  18:59:49 -0400 (0:00:00.391)       0:06:20.743 ***** 
skipping: [managed-node2] => {
    "false_condition": "_storage_volumes_list | length > 0"
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Saturday 02 November 2024  18:59:49 -0400 (0:00:00.151)       0:06:20.895 ***** 
ok: [managed-node2] => {
    "changed": false,
    "info": {
        "/dev/mapper/stratis-1-private-48f6aabace804e19996c114f78782862-flex-mdv": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-48f6aabace804e19996c114f78782862-flex-mdv",
            "size": "512M",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-48f6aabace804e19996c114f78782862-flex-thindata": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-48f6aabace804e19996c114f78782862-flex-thindata",
            "size": "9.5G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-48f6aabace804e19996c114f78782862-flex-thinmeta": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-48f6aabace804e19996c114f78782862-flex-thinmeta",
            "size": "6M",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-48f6aabace804e19996c114f78782862-physical-originsub": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-48f6aabace804e19996c114f78782862-physical-originsub",
            "size": "10G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-48f6aabace804e19996c114f78782862-thinpool-pool": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-48f6aabace804e19996c114f78782862-thinpool-pool",
            "size": "9.5G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/sda": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": "30a6b361-96a2-45a7-b18c-1a82d84c3bd4"
        },
        "/dev/sdb": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda1",
            "size": "250G",
            "type": "partition",
            "uuid": "d6f52a16-d77d-4c0d-9841-76b856654401"
        }
    }
}
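
The stratis-1-private-<pool-uuid>-* entries are stratisd's internal device-mapper layers (the flex metadata volume, thin data and thin metadata sub-devices, the physical origin, and the thin pool itself), not user filesystems; the pool so far has no filesystems at all. A comparable listing can be produced by hand (a sketch; the test gathers this with its own helper, not necessarily lsblk):

- name: List block devices with the same fields the test reports (sketch)
  ansible.builtin.command:
    cmd: lsblk --json -o NAME,FSTYPE,LABEL,MOUNTPOINT,SIZE,TYPE,UUID
  register: storage_test_lsblk  # hypothetical register name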

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Saturday 02 November 2024  18:59:50 -0400 (0:00:00.496)       0:06:21.391 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.002951",
    "end": "2024-11-02 18:59:50.883819",
    "rc": 0,
    "start": "2024-11-02 18:59:50.880868"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Wed Oct 30 09:04:39 2024
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=d6f52a16-d77d-4c0d-9841-76b856654401 /                       xfs     defaults        0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
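
Since the pool was created without volumes, blivet reported mounts: [] and nothing Stratis-related should have landed in /etc/fstab; only the pre-existing anaconda and NFS entries plus the role's fingerprint are present. A hypothetical check to that effect (storage_test_fstab is the register name this test file nulls out elsewhere in the log):

- name: Confirm no Stratis entries were written to /etc/fstab (sketch)
  ansible.builtin.assert:
    that:
      - storage_test_fstab.stdout_lines | select('search', '/dev/stratis') | list | length == 0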

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Saturday 02 November 2024  18:59:51 -0400 (0:00:00.550)       0:06:21.942 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.003023",
    "end": "2024-11-02 18:59:51.464606",
    "failed_when_result": false,
    "rc": 0,
    "start": "2024-11-02 18:59:51.461583"
}

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Saturday 02 November 2024  18:59:51 -0400 (0:00:00.516)       0:06:22.459 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node2 => (item={'disks': ['sda'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'stratis', 'volumes': []})

TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Saturday 02 November 2024  18:59:51 -0400 (0:00:00.170)       0:06:22.629 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Saturday 02 November 2024  18:59:51 -0400 (0:00:00.080)       0:06:22.710 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Saturday 02 November 2024  18:59:51 -0400 (0:00:00.077)       0:06:22.788 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify pool subset] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34
Saturday 02 November 2024  18:59:51 -0400 (0:00:00.078)       0:06:22.867 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node2 => (item=members)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node2 => (item=volumes)

TASK [Set test variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2
Saturday 02 November 2024  18:59:52 -0400 (0:00:00.160)       0:06:23.027 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Get the canonical device path for each member device] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8
Saturday 02 November 2024  18:59:52 -0400 (0:00:00.074)       0:06:23.102 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Set pvs lvm length] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17
Saturday 02 November 2024  18:59:52 -0400 (0:00:00.061)       0:06:23.163 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set pool pvs] ************************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22
Saturday 02 November 2024  18:59:52 -0400 (0:00:00.195)       0:06:23.358 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify PV count] *********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27
Saturday 02 November 2024  18:59:52 -0400 (0:00:00.083)       0:06:23.442 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36
Saturday 02 November 2024  18:59:52 -0400 (0:00:00.172)       0:06:23.615 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41
Saturday 02 November 2024  18:59:52 -0400 (0:00:00.104)       0:06:23.719 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and not storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46
Saturday 02 November 2024  18:59:52 -0400 (0:00:00.097)       0:06:23.817 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level",
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:51
Saturday 02 November 2024  18:59:53 -0400 (0:00:00.085)       0:06:23.903 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:64
Saturday 02 November 2024  18:59:53 -0400 (0:00:00.062)       0:06:23.965 ***** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0
}

STDOUT:


** (process:203129): WARNING **: 18:59:53.344: failed to load module nvme: libbd_nvme.so.2: cannot open shared object file: No such file or directory
True



STDERR:

OpenSSH_8.7p1, OpenSSL 3.2.2 4 Jun 2024
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.42.118 originally 10.31.42.118
debug2: match not found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: configuration requests final Match pass
debug2: resolve_canonicalize: hostname 10.31.42.118 is address
debug1: re-parsing configuration
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.42.118 originally 10.31.42.118
debug2: match found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: auto-mux: Trying existing master
debug2: fd 3 setting O_NONBLOCK
debug2: mux_client_hello_exchange: master version 4
debug1: mux_client_request_session: master session id: 2
debug2: Received exit status from master 0
Shared connection to 10.31.42.118 closed.
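
The probe printed True, so the installed blivet exposes grow_to_fill on its LVM PV format; the GLib warning about libbd_nvme.so.2 is harmless libblockdev plugin-probing noise. The check is presumably a small interpreter one-liner along these lines (a reconstruction under that assumption, not the literal test task):

- name: Probe blivet for grow_to_fill support (hypothetical reconstruction)
  ansible.builtin.command:
    cmd: >-
      python3 -c "import blivet.formats.lvmpv;
      print(hasattr(blivet.formats.lvmpv.LVMPhysicalVolume, 'grow_to_fill'))"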


TASK [Verify that PVs fill the whole devices when they should] *****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:73
Saturday 02 November 2024  18:59:53 -0400 (0:00:00.454)       0:06:24.419 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:83
Saturday 02 November 2024  18:59:53 -0400 (0:00:00.095)       0:06:24.514 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node2

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Saturday 02 November 2024  18:59:53 -0400 (0:00:00.141)       0:06:24.656 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Saturday 02 November 2024  18:59:53 -0400 (0:00:00.073)       0:06:24.730 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Saturday 02 November 2024  18:59:53 -0400 (0:00:00.080)       0:06:24.810 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Saturday 02 November 2024  18:59:53 -0400 (0:00:00.080)       0:06:24.891 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Saturday 02 November 2024  18:59:54 -0400 (0:00:00.079)       0:06:24.971 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Saturday 02 November 2024  18:59:54 -0400 (0:00:00.184)       0:06:25.155 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Saturday 02 November 2024  18:59:54 -0400 (0:00:00.077)       0:06:25.233 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55
Saturday 02 November 2024  18:59:54 -0400 (0:00:00.075)       0:06:25.309 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64
Saturday 02 November 2024  18:59:54 -0400 (0:00:00.104)       0:06:25.414 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74
Saturday 02 November 2024  18:59:54 -0400 (0:00:00.076)       0:06:25.491 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83
Saturday 02 November 2024  18:59:54 -0400 (0:00:00.075)       0:06:25.566 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}

TASK [Check LVM RAID] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:86
Saturday 02 November 2024  18:59:54 -0400 (0:00:00.077)       0:06:25.644 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node2

TASK [Validate pool member LVM RAID settings] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Saturday 02 November 2024  18:59:54 -0400 (0:00:00.148)       0:06:25.792 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check Thin Pools] ********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:89
Saturday 02 November 2024  18:59:54 -0400 (0:00:00.061)       0:06:25.854 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node2

TASK [Validate pool member thinpool settings] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2
Saturday 02 November 2024  18:59:55 -0400 (0:00:00.142)       0:06:25.996 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check member encryption] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:92
Saturday 02 November 2024  18:59:55 -0400 (0:00:00.061)       0:06:26.058 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node2

TASK [Set test variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Saturday 02 November 2024  18:59:55 -0400 (0:00:00.344)       0:06:26.402 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Saturday 02 November 2024  18:59:55 -0400 (0:00:00.154)       0:06:26.557 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Saturday 02 November 2024  18:59:55 -0400 (0:00:00.078)       0:06:26.635 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Saturday 02 November 2024  18:59:55 -0400 (0:00:00.070)       0:06:26.705 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:95
Saturday 02 November 2024  18:59:55 -0400 (0:00:00.120)       0:06:26.826 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node2

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Saturday 02 November 2024  18:59:56 -0400 (0:00:00.276)       0:06:27.102 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:98
Saturday 02 November 2024  18:59:56 -0400 (0:00:00.067)       0:06:27.170 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node2

TASK [Run 'stratis report'] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Saturday 02 November 2024  18:59:56 -0400 (0:00:00.203)       0:06:27.374 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "stratis",
        "report"
    ],
    "delta": "0:00:00.339047",
    "end": "2024-11-02 18:59:57.232827",
    "rc": 0,
    "start": "2024-11-02 18:59:56.893780"
}

STDOUT:

{
    "name_to_pool_uuid_map": {},
    "partially_constructed_pools": [],
    "path_to_ids_map": {},
    "pools": [
        {
            "available_actions": "fully_operational",
            "blockdevs": {
                "cachedevs": [],
                "datadevs": [
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": true,
                        "path": "/dev/sda",
                        "size": "20971520 sectors",
                        "uuid": "30a6b361-96a2-45a7-b18c-1a82d84c3bd4"
                    }
                ]
            },
            "filesystems": [],
            "fs_limit": 100,
            "name": "foo",
            "uuid": "48f6aaba-ce80-4e19-996c-114f78782862"
        }
    ],
    "stopped_pools": []
}
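
For reference, the task that produced the JSON above is a plain command
invocation. A minimal sketch of the pattern (the task body is an assumption;
the register variable name storage_test_stratis_report is taken from the
cleanup task later in this log):

    - name: Run 'stratis report'
      ansible.builtin.command: stratis report
      register: storage_test_stratis_report
      changed_when: false  # reporting never mutates state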

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:11
Saturday 02 November 2024  18:59:57 -0400 (0:00:00.856)       0:06:28.231 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_stratis_pool_info": {
            "name_to_pool_uuid_map": {},
            "partially_constructed_pools": [],
            "path_to_ids_map": {},
            "pools": [
                {
                    "available_actions": "fully_operational",
                    "blockdevs": {
                        "cachedevs": [],
                        "datadevs": [
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": true,
                                "path": "/dev/sda",
                                "size": "20971520 sectors",
                                "uuid": "30a6b361-96a2-45a7-b18c-1a82d84c3bd4"
                            }
                        ]
                    },
                    "filesystems": [],
                    "fs_limit": 100,
                    "name": "foo",
                    "uuid": "48f6aaba-ce80-4e19-996c-114f78782862"
                }
            ],
            "stopped_pools": []
        }
    },
    "changed": false
}
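
The _stratis_pool_info fact shown above is presumably derived by parsing the
registered report output; a sketch of that step, assuming the from_json
filter is used:

    - name: Get information about Stratis
      ansible.builtin.set_fact:
        _stratis_pool_info: "{{ storage_test_stratis_report.stdout | from_json }}"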

TASK [Verify that the pool was created] ****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Saturday 02 November 2024  18:59:57 -0400 (0:00:00.118)       0:06:28.349 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed
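
The assertion itself is not echoed in the log; a plausible shape for it,
assuming the pool name is matched against the parsed report (the exact
expression is a guess):

    - name: Verify that the pool was created
      ansible.builtin.assert:
        that:
          - _stratis_pool_info.pools | selectattr('name', 'equalto', storage_test_pool.name) | list | length == 1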

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:25
Saturday 02 November 2024  18:59:57 -0400 (0:00:00.157)       0:06:28.506 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:34
Saturday 02 November 2024  18:59:57 -0400 (0:00:00.129)       0:06:28.636 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:44
Saturday 02 November 2024  18:59:57 -0400 (0:00:00.092)       0:06:28.728 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:101
Saturday 02 November 2024  18:59:58 -0400 (0:00:00.213)       0:06:28.942 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Saturday 02 November 2024  18:59:58 -0400 (0:00:00.100)       0:06:29.043 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Saturday 02 November 2024  18:59:58 -0400 (0:00:00.062)       0:06:29.105 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Saturday 02 November 2024  18:59:58 -0400 (0:00:00.069)       0:06:29.174 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Add the second disk to the pool] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:202
Saturday 02 November 2024  18:59:58 -0400 (0:00:00.092)       0:06:29.267 ***** 
included: fedora.linux_system_roles.storage for managed-node2
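
Based on the storage_pools value echoed a few tasks below, the play step that
triggers this run is roughly the following (the wrapper structure is an
assumption; the pool definition matches the logged data):

    - name: Add the second disk to the pool
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            type: stratis
            disks:
              - sda
              - sdb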

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Saturday 02 November 2024  18:59:58 -0400 (0:00:00.221)       0:06:29.489 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Saturday 02 November 2024  18:59:58 -0400 (0:00:00.131)       0:06:29.620 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Saturday 02 November 2024  18:59:58 -0400 (0:00:00.143)       0:06:29.763 ***** 
skipping: [managed-node2] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node2] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}
ok: [managed-node2] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Saturday 02 November 2024  18:59:59 -0400 (0:00:00.222)       0:06:29.986 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Saturday 02 November 2024  18:59:59 -0400 (0:00:00.082)       0:06:30.069 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Saturday 02 November 2024  18:59:59 -0400 (0:00:00.078)       0:06:30.148 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Saturday 02 November 2024  18:59:59 -0400 (0:00:00.089)       0:06:30.237 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Saturday 02 November 2024  18:59:59 -0400 (0:00:00.332)       0:06:30.569 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Saturday 02 November 2024  18:59:59 -0400 (0:00:00.207)       0:06:30.777 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Saturday 02 November 2024  18:59:59 -0400 (0:00:00.084)       0:06:30.862 ***** 
ok: [managed-node2] => {
    "storage_pools": [
        {
            "disks": [
                "sda",
                "sdb"
            ],
            "name": "foo",
            "type": "stratis"
        }
    ]
}

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Saturday 02 November 2024  19:00:00 -0400 (0:00:00.087)       0:06:30.949 ***** 
ok: [managed-node2] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Saturday 02 November 2024  19:00:00 -0400 (0:00:00.084)       0:06:31.034 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Saturday 02 November 2024  19:00:00 -0400 (0:00:00.078)       0:06:31.112 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Saturday 02 November 2024  19:00:00 -0400 (0:00:00.077)       0:06:31.190 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Saturday 02 November 2024  19:00:00 -0400 (0:00:00.077)       0:06:31.267 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Saturday 02 November 2024  19:00:00 -0400 (0:00:00.086)       0:06:31.354 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Saturday 02 November 2024  19:00:00 -0400 (0:00:00.152)       0:06:31.506 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Saturday 02 November 2024  19:00:00 -0400 (0:00:00.067)       0:06:31.574 ***** 
changed: [managed-node2] => {
    "actions": [
        {
            "action": "create format",
            "device": "/dev/sdb",
            "fs_type": "stratis"
        },
        {
            "action": "add container member",
            "device": "/dev/sdb",
            "fs_type": null
        }
    ],
    "changed": true,
    "crypts": [],
    "leaves": [
        "/dev/stratis/foo",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1"
    ],
    "mounts": [],
    "packages": [
        "xfsprogs",
        "stratisd",
        "stratis-cli"
    ],
    "pools": [
        {
            "disks": [
                "sda",
                "sdb"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "stratis",
            "volumes": []
        }
    ],
    "volumes": []
}
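
The two blivet actions above (format /dev/sdb for stratis, then add it as a
container member) correspond to a single stratis-cli operation. Outside the
role, the same change could be made by hand, e.g.:

    - name: Add /dev/sdb to the existing pool manually (illustration only)
      ansible.builtin.command: stratis pool add-data foo /dev/sdb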

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Saturday 02 November 2024  19:00:03 -0400 (0:00:02.614)       0:06:34.189 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Saturday 02 November 2024  19:00:03 -0400 (0:00:00.091)       0:06:34.280 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730588354.2243395,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "2b06b132c9b1f74ec4dca585656a9f294c78ba1c",
        "ctime": 1730588353.442331,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 415236291,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1730588353.442331,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1366,
        "uid": 0,
        "version": "3857707169",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Saturday 02 November 2024  19:00:03 -0400 (0:00:00.474)       0:06:34.755 ***** 
ok: [managed-node2] => {
    "backup": "",
    "changed": false
}
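
The fingerprint this task maintains is visible in the fstab dump later in
this log ("# system_role:storage"). A sketch of how such a marker can be
written, assuming lineinfile semantics (the role's actual implementation may
differ):

    - name: Add fingerprint to /etc/fstab if present
      ansible.builtin.lineinfile:
        path: /etc/fstab
        line: "# system_role:storage"
        insertbefore: BOF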

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Saturday 02 November 2024  19:00:04 -0400 (0:00:00.523)       0:06:35.278 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Saturday 02 November 2024  19:00:04 -0400 (0:00:00.057)       0:06:35.335 ***** 
ok: [managed-node2] => {
    "blivet_output": {
        "actions": [
            {
                "action": "create format",
                "device": "/dev/sdb",
                "fs_type": "stratis"
            },
            {
                "action": "add container member",
                "device": "/dev/sdb",
                "fs_type": null
            }
        ],
        "changed": true,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/stratis/foo",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1"
        ],
        "mounts": [],
        "packages": [
            "xfsprogs",
            "stratisd",
            "stratis-cli"
        ],
        "pools": [
            {
                "disks": [
                    "sda",
                    "sdb"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "stratis",
                "volumes": []
            }
        ],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Saturday 02 November 2024  19:00:04 -0400 (0:00:00.062)       0:06:35.398 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda",
                    "sdb"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "stratis",
                "volumes": []
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Saturday 02 November 2024  19:00:04 -0400 (0:00:00.083)       0:06:35.481 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Saturday 02 November 2024  19:00:04 -0400 (0:00:00.089)       0:06:35.571 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Saturday 02 November 2024  19:00:04 -0400 (0:00:00.093)       0:06:35.665 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "blivet_output['mounts']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Saturday 02 November 2024  19:00:04 -0400 (0:00:00.082)       0:06:35.747 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Saturday 02 November 2024  19:00:04 -0400 (0:00:00.137)       0:06:35.884 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Saturday 02 November 2024  19:00:05 -0400 (0:00:00.203)       0:06:36.088 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "blivet_output['mounts']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Saturday 02 November 2024  19:00:05 -0400 (0:00:00.140)       0:06:36.229 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730587881.7952998,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1730587879.2202723,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 104857860,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1730587879.2212098,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "3381617552",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Saturday 02 November 2024  19:00:05 -0400 (0:00:00.494)       0:06:36.724 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Saturday 02 November 2024  19:00:05 -0400 (0:00:00.034)       0:06:36.758 ***** 
ok: [managed-node2]

TASK [Verify role results] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:211
Saturday 02 November 2024  19:00:06 -0400 (0:00:01.027)       0:06:37.786 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node2

TASK [Print out pool information] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Saturday 02 November 2024  19:00:07 -0400 (0:00:00.322)       0:06:38.108 ***** 
ok: [managed-node2] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda",
                "sdb"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "stratis",
            "volumes": []
        }
    ]
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Saturday 02 November 2024  19:00:07 -0400 (0:00:00.091)       0:06:38.200 ***** 
skipping: [managed-node2] => {
    "false_condition": "_storage_volumes_list | length > 0"
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Saturday 02 November 2024  19:00:07 -0400 (0:00:00.080)       0:06:38.281 ***** 
ok: [managed-node2] => {
    "changed": false,
    "info": {
        "/dev/mapper/stratis-1-private-48f6aabace804e19996c114f78782862-flex-mdv": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-48f6aabace804e19996c114f78782862-flex-mdv",
            "size": "512M",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-48f6aabace804e19996c114f78782862-flex-thindata": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-48f6aabace804e19996c114f78782862-flex-thindata",
            "size": "19.5G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-48f6aabace804e19996c114f78782862-flex-thinmeta": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-48f6aabace804e19996c114f78782862-flex-thinmeta",
            "size": "9M",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-48f6aabace804e19996c114f78782862-physical-originsub": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-48f6aabace804e19996c114f78782862-physical-originsub",
            "size": "20G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-48f6aabace804e19996c114f78782862-thinpool-pool": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-48f6aabace804e19996c114f78782862-thinpool-pool",
            "size": "19.5G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/sda": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": "30a6b361-96a2-45a7-b18c-1a82d84c3bd4"
        },
        "/dev/sdb": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": "4fee7de6-3e39-4815-a5cf-eb0d0b23d848"
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda1",
            "size": "250G",
            "type": "partition",
            "uuid": "d6f52a16-d77d-4c0d-9841-76b856654401"
        }
    }
}
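
The info mapping above comes from the test suite's own blkinfo helper module.
A hypothetical stand-in that yields comparable fields with stock tooling
(register name is an assumption):

    - name: Collect block device info with lsblk (substitute for blkinfo)
      ansible.builtin.command: >-
        lsblk --json -o NAME,FSTYPE,LABEL,MOUNTPOINT,SIZE,TYPE,UUID
      register: storage_test_lsblk
      changed_when: false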

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Saturday 02 November 2024  19:00:07 -0400 (0:00:00.470)       0:06:38.751 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.003015",
    "end": "2024-11-02 19:00:08.223860",
    "rc": 0,
    "start": "2024-11-02 19:00:08.220845"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Wed Oct 30 09:04:39 2024
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=d6f52a16-d77d-4c0d-9841-76b856654401 /                       xfs     defaults        0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Saturday 02 November 2024  19:00:08 -0400 (0:00:00.482)       0:06:39.233 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.002947",
    "end": "2024-11-02 19:00:08.720948",
    "failed_when_result": false,
    "rc": 0,
    "start": "2024-11-02 19:00:08.718001"
}
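
Note the failed_when_result field above: the read is deliberately tolerant so
a missing /etc/crypttab does not fail the play. The likely shape of the task
(register name taken from the variables cleared earlier in this log):

    - name: Read the /etc/crypttab file
      ansible.builtin.command: cat /etc/crypttab
      register: storage_test_crypttab
      failed_when: false
      changed_when: false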

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Saturday 02 November 2024  19:00:08 -0400 (0:00:00.478)       0:06:39.712 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node2 => (item={'disks': ['sda', 'sdb'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'stratis', 'volumes': []})

TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Saturday 02 November 2024  19:00:08 -0400 (0:00:00.151)       0:06:39.864 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Saturday 02 November 2024  19:00:09 -0400 (0:00:00.076)       0:06:39.940 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Saturday 02 November 2024  19:00:09 -0400 (0:00:00.071)       0:06:40.011 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify pool subset] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34
Saturday 02 November 2024  19:00:09 -0400 (0:00:00.092)       0:06:40.104 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node2 => (item=members)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node2 => (item=volumes)
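
The two includes above follow directly from the _storage_pool_tests fact set
a few tasks earlier; a minimal sketch of the dispatch, assuming a simple loop
over that list:

    - name: Verify pool subset
      ansible.builtin.include_tasks: "test-verify-pool-{{ item }}.yml"
      loop: "{{ _storage_pool_tests }}"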

TASK [Set test variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2
Saturday 02 November 2024  19:00:09 -0400 (0:00:00.162)       0:06:40.266 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Get the canonical device path for each member device] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8
Saturday 02 November 2024  19:00:09 -0400 (0:00:00.173)       0:06:40.439 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Set pvs lvm length] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17
Saturday 02 November 2024  19:00:09 -0400 (0:00:00.081)       0:06:40.521 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set pool pvs] ************************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22
Saturday 02 November 2024  19:00:09 -0400 (0:00:00.108)       0:06:40.629 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify PV count] *********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27
Saturday 02 November 2024  19:00:09 -0400 (0:00:00.119)       0:06:40.749 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36
Saturday 02 November 2024  19:00:09 -0400 (0:00:00.120)       0:06:40.869 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41
Saturday 02 November 2024  19:00:10 -0400 (0:00:00.109)       0:06:40.979 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and not storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46
Saturday 02 November 2024  19:00:10 -0400 (0:00:00.089)       0:06:41.069 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level",
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:51
Saturday 02 November 2024  19:00:10 -0400 (0:00:00.099)       0:06:41.169 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:64
Saturday 02 November 2024  19:00:10 -0400 (0:00:00.059)       0:06:41.229 ***** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0
}

STDOUT:


** (process:204365): WARNING **: 19:00:10.613: failed to load module nvme: libbd_nvme.so.2: cannot open shared object file: No such file or directory
True



STDERR:

OpenSSH_8.7p1, OpenSSL 3.2.2 4 Jun 2024
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.42.118 originally 10.31.42.118
debug2: match not found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: configuration requests final Match pass
debug2: resolve_canonicalize: hostname 10.31.42.118 is address
debug1: re-parsing configuration
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.42.118 originally 10.31.42.118
debug2: match found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: auto-mux: Trying existing master
debug2: fd 3 setting O_NONBLOCK
debug2: mux_client_hello_exchange: master version 4
debug1: mux_client_request_session: master session id: 2
debug2: Received exit status from master 0
Shared connection to 10.31.42.118 closed.
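
The probe above prints "True" amid a harmless libbd_nvme warning. The exact
script is not shown in the log; a guess at what such a capability check might
look like (the module path and attribute name are assumptions, not taken from
the test suite):

    - name: Check that blivet supports PV grow to fill
      ansible.builtin.command: >-
        python3 -c "from blivet.formats.lvmpv import LVMPhysicalVolume;
        print(hasattr(LVMPhysicalVolume, 'grow_to_fill'))"
      changed_when: false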


TASK [Verify that PVs fill the whole devices when they should] *****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:73
Saturday 02 November 2024  19:00:10 -0400 (0:00:00.460)       0:06:41.689 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:83
Saturday 02 November 2024  19:00:10 -0400 (0:00:00.103)       0:06:41.793 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node2

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Saturday 02 November 2024  19:00:11 -0400 (0:00:00.166)       0:06:41.959 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Saturday 02 November 2024  19:00:11 -0400 (0:00:00.076)       0:06:42.036 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Saturday 02 November 2024  19:00:11 -0400 (0:00:00.073)       0:06:42.109 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Saturday 02 November 2024  19:00:11 -0400 (0:00:00.210)       0:06:42.320 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Saturday 02 November 2024  19:00:11 -0400 (0:00:00.086)       0:06:42.406 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Saturday 02 November 2024  19:00:11 -0400 (0:00:00.080)       0:06:42.486 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Saturday 02 November 2024  19:00:11 -0400 (0:00:00.075)       0:06:42.561 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55
Saturday 02 November 2024  19:00:11 -0400 (0:00:00.092)       0:06:42.654 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64
Saturday 02 November 2024  19:00:11 -0400 (0:00:00.133)       0:06:42.787 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74
Saturday 02 November 2024  19:00:12 -0400 (0:00:00.123)       0:06:42.911 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83
Saturday 02 November 2024  19:00:12 -0400 (0:00:00.113)       0:06:43.024 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}

TASK [Check LVM RAID] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:86
Saturday 02 November 2024  19:00:12 -0400 (0:00:00.121)       0:06:43.145 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node2

TASK [Validate pool member LVM RAID settings] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Saturday 02 November 2024  19:00:12 -0400 (0:00:00.181)       0:06:43.327 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check Thin Pools] ********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:89
Saturday 02 November 2024  19:00:12 -0400 (0:00:00.063)       0:06:43.391 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node2

TASK [Validate pool member thinpool settings] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2
Saturday 02 November 2024  19:00:12 -0400 (0:00:00.166)       0:06:43.557 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check member encryption] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:92
Saturday 02 November 2024  19:00:12 -0400 (0:00:00.095)       0:06:43.653 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node2

TASK [Set test variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Saturday 02 November 2024  19:00:13 -0400 (0:00:00.444)       0:06:44.098 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Saturday 02 November 2024  19:00:13 -0400 (0:00:00.152)       0:06:44.250 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Saturday 02 November 2024  19:00:13 -0400 (0:00:00.067)       0:06:44.318 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Saturday 02 November 2024  19:00:13 -0400 (0:00:00.094)       0:06:44.413 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:95
Saturday 02 November 2024  19:00:13 -0400 (0:00:00.098)       0:06:44.511 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node2

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Saturday 02 November 2024  19:00:13 -0400 (0:00:00.149)       0:06:44.661 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:98
Saturday 02 November 2024  19:00:13 -0400 (0:00:00.059)       0:06:44.721 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node2

TASK [Run 'stratis report'] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Saturday 02 November 2024  19:00:14 -0400 (0:00:00.184)       0:06:44.905 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "stratis",
        "report"
    ],
    "delta": "0:00:00.335827",
    "end": "2024-11-02 19:00:14.664081",
    "rc": 0,
    "start": "2024-11-02 19:00:14.328254"
}

STDOUT:

{
    "name_to_pool_uuid_map": {},
    "partially_constructed_pools": [],
    "path_to_ids_map": {},
    "pools": [
        {
            "available_actions": "fully_operational",
            "blockdevs": {
                "cachedevs": [],
                "datadevs": [
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": true,
                        "path": "/dev/sda",
                        "size": "20971520 sectors",
                        "uuid": "30a6b361-96a2-45a7-b18c-1a82d84c3bd4"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": true,
                        "path": "/dev/sdb",
                        "size": "20971520 sectors",
                        "uuid": "4fee7de6-3e39-4815-a5cf-eb0d0b23d848"
                    }
                ]
            },
            "filesystems": [],
            "fs_limit": 100,
            "name": "foo",
            "uuid": "48f6aaba-ce80-4e19-996c-114f78782862"
        }
    ],
    "stopped_pools": []
}
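
Note: the report confirms pool "foo" is fully operational on /dev/sda and /dev/sdb with no filesystems; each data device is 20971520 sectors x 512 bytes = 10 GiB, matching the 10G disk sizes reported later by blkinfo. As a hedged sketch (assuming the jq package is available on the node; the test below parses the same JSON with Ansible's from_json filter instead), the same facts can be pulled out of the report by hand:

    # Print the pool names and the number of data devices in the first pool
    stratis report | jq -r '.pools[].name'                        # -> foo
    stratis report | jq '.pools[0].blockdevs.datadevs | length'   # -> 2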

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:11
Saturday 02 November 2024  19:00:14 -0400 (0:00:00.810)       0:06:45.716 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_stratis_pool_info": {
            "name_to_pool_uuid_map": {},
            "partially_constructed_pools": [],
            "path_to_ids_map": {},
            "pools": [
                {
                    "available_actions": "fully_operational",
                    "blockdevs": {
                        "cachedevs": [],
                        "datadevs": [
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": true,
                                "path": "/dev/sda",
                                "size": "20971520 sectors",
                                "uuid": "30a6b361-96a2-45a7-b18c-1a82d84c3bd4"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": true,
                                "path": "/dev/sdb",
                                "size": "20971520 sectors",
                                "uuid": "4fee7de6-3e39-4815-a5cf-eb0d0b23d848"
                            }
                        ]
                    },
                    "filesystems": [],
                    "fs_limit": 100,
                    "name": "foo",
                    "uuid": "48f6aaba-ce80-4e19-996c-114f78782862"
                }
            ],
            "stopped_pools": []
        }
    },
    "changed": false
}

TASK [Verify that the pool was created] ****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Saturday 02 November 2024  19:00:14 -0400 (0:00:00.152)       0:06:45.869 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed
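
"All assertions passed" here means the pool name from the stratis report matched the pool requested in storage_pools. A minimal sketch of an equivalent assertion, assuming the _stratis_pool_info fact set above (the test's actual expression may differ):

    - name: Verify that the pool was created
      assert:
        that:
          - "_stratis_pool_info.pools | selectattr('name', 'equalto', storage_test_pool.name) | list | length == 1"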

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:25
Saturday 02 November 2024  19:00:15 -0400 (0:00:00.126)       0:06:45.995 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:34
Saturday 02 November 2024  19:00:15 -0400 (0:00:00.253)       0:06:46.249 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:44
Saturday 02 November 2024  19:00:15 -0400 (0:00:00.117)       0:06:46.367 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:101
Saturday 02 November 2024  19:00:15 -0400 (0:00:00.114)       0:06:46.481 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Saturday 02 November 2024  19:00:15 -0400 (0:00:00.108)       0:06:46.589 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Saturday 02 November 2024  19:00:15 -0400 (0:00:00.058)       0:06:46.647 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Saturday 02 November 2024  19:00:15 -0400 (0:00:00.055)       0:06:46.703 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Clean up] ****************************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:214
Saturday 02 November 2024  19:00:15 -0400 (0:00:00.074)       0:06:46.777 ***** 
included: fedora.linux_system_roles.storage for managed-node2

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Saturday 02 November 2024  19:00:16 -0400 (0:00:00.275)       0:06:47.053 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Saturday 02 November 2024  19:00:16 -0400 (0:00:00.147)       0:06:47.200 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Saturday 02 November 2024  19:00:16 -0400 (0:00:00.110)       0:06:47.311 ***** 
skipping: [managed-node2] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node2] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Saturday 02 November 2024  19:00:16 -0400 (0:00:00.201)       0:06:47.512 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Saturday 02 November 2024  19:00:16 -0400 (0:00:00.106)       0:06:47.619 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Saturday 02 November 2024  19:00:16 -0400 (0:00:00.266)       0:06:47.885 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Saturday 02 November 2024  19:00:17 -0400 (0:00:00.085)       0:06:47.970 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Saturday 02 November 2024  19:00:17 -0400 (0:00:00.147)       0:06:48.118 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Saturday 02 November 2024  19:00:17 -0400 (0:00:00.266)       0:06:48.384 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Saturday 02 November 2024  19:00:17 -0400 (0:00:00.121)       0:06:48.506 ***** 
ok: [managed-node2] => {
    "storage_pools": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "name": "foo",
            "state": "absent",
            "type": "stratis",
            "volumes": [
                {
                    "mount_point": "/opt/test1",
                    "name": "test1",
                    "size": "4g",
                    "state": "absent"
                }
            ]
        }
    ]
}
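
With state: absent on both the pool and its volume, the role is being asked to tear everything down. A minimal sketch of the play that produces this input (the test playbook wraps this in its own setup; shown here only to make the variable shape concrete):

    - hosts: managed-node2
      tasks:
        - name: Clean up
          include_role:
            name: fedora.linux_system_roles.storage
          vars:
            storage_pools:
              - name: foo
                type: stratis
                disks: [sda, sdb, sdc, sdd, sde, sdf, sdg, sdh, sdi]
                state: absent
                volumes:
                  - name: test1
                    size: 4g
                    mount_point: /opt/test1
                    state: absent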

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Saturday 02 November 2024  19:00:17 -0400 (0:00:00.086)       0:06:48.592 ***** 
ok: [managed-node2] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Saturday 02 November 2024  19:00:17 -0400 (0:00:00.145)       0:06:48.738 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Saturday 02 November 2024  19:00:17 -0400 (0:00:00.106)       0:06:48.845 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Saturday 02 November 2024  19:00:18 -0400 (0:00:00.073)       0:06:48.918 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Saturday 02 November 2024  19:00:18 -0400 (0:00:00.100)       0:06:49.019 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Saturday 02 November 2024  19:00:18 -0400 (0:00:00.073)       0:06:49.092 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Saturday 02 November 2024  19:00:18 -0400 (0:00:00.161)       0:06:49.253 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Saturday 02 November 2024  19:00:18 -0400 (0:00:00.082)       0:06:49.335 ***** 
changed: [managed-node2] => {
    "actions": [
        {
            "action": "destroy device",
            "device": "/dev/stratis/foo",
            "fs_type": null
        },
        {
            "action": "destroy format",
            "device": "/dev/sda",
            "fs_type": "stratis"
        },
        {
            "action": "destroy format",
            "device": "/dev/sdb",
            "fs_type": "stratis"
        }
    ],
    "changed": true,
    "crypts": [],
    "leaves": [
        "/dev/sda",
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1"
    ],
    "mounts": [],
    "packages": [
        "xfsprogs"
    ],
    "pools": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "absent",
            "type": "stratis",
            "volumes": [
                {
                    "_device": "",
                    "_mount_id": "",
                    "_raw_device": "",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "absent",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "stratis",
                    "vdo_pool_size": null
                }
            ]
        }
    ],
    "volumes": []
}
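
Blivet tears the stack down top-first: it destroys the pool device /dev/stratis/foo, then wipes the stratis format from each member disk. A hedged sketch of the manual equivalent of those three actions (safe here only because the pool's filesystems list was empty; do not run against live data):

    stratis pool destroy foo          # "destroy device" /dev/stratis/foo
    wipefs -a /dev/sda /dev/sdb       # "destroy format" on each member disk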

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Saturday 02 November 2024  19:00:21 -0400 (0:00:03.072)       0:06:52.408 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Saturday 02 November 2024  19:00:21 -0400 (0:00:00.397)       0:06:52.805 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730588354.2243395,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "2b06b132c9b1f74ec4dca585656a9f294c78ba1c",
        "ctime": 1730588353.442331,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 415236291,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1730588353.442331,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1366,
        "uid": 0,
        "version": "3857707169",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Saturday 02 November 2024  19:00:22 -0400 (0:00:00.603)       0:06:53.409 ***** 
ok: [managed-node2] => {
    "backup": "",
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Saturday 02 November 2024  19:00:23 -0400 (0:00:00.555)       0:06:53.965 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Saturday 02 November 2024  19:00:23 -0400 (0:00:00.068)       0:06:54.034 ***** 
ok: [managed-node2] => {
    "blivet_output": {
        "actions": [
            {
                "action": "destroy device",
                "device": "/dev/stratis/foo",
                "fs_type": null
            },
            {
                "action": "destroy format",
                "device": "/dev/sda",
                "fs_type": "stratis"
            },
            {
                "action": "destroy format",
                "device": "/dev/sdb",
                "fs_type": "stratis"
            }
        ],
        "changed": true,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/sda",
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1"
        ],
        "mounts": [],
        "packages": [
            "xfsprogs"
        ],
        "pools": [
            {
                "disks": [
                    "sda",
                    "sdb",
                    "sdc",
                    "sdd",
                    "sde",
                    "sdf",
                    "sdg",
                    "sdh",
                    "sdi"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "absent",
                "type": "stratis",
                "volumes": [
                    {
                        "_device": "",
                        "_mount_id": "",
                        "_raw_device": "",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "absent",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "stratis",
                        "vdo_pool_size": null
                    }
                ]
            }
        ],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Saturday 02 November 2024  19:00:23 -0400 (0:00:00.095)       0:06:54.129 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda",
                    "sdb",
                    "sdc",
                    "sdd",
                    "sde",
                    "sdf",
                    "sdg",
                    "sdh",
                    "sdi"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "absent",
                "type": "stratis",
                "volumes": [
                    {
                        "_device": "",
                        "_mount_id": "",
                        "_raw_device": "",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "absent",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "stratis",
                        "vdo_pool_size": null
                    }
                ]
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Saturday 02 November 2024  19:00:23 -0400 (0:00:00.128)       0:06:54.257 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Saturday 02 November 2024  19:00:23 -0400 (0:00:00.097)       0:06:54.354 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Saturday 02 November 2024  19:00:23 -0400 (0:00:00.108)       0:06:54.462 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "blivet_output['mounts']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Saturday 02 November 2024  19:00:23 -0400 (0:00:00.078)       0:06:54.541 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Saturday 02 November 2024  19:00:23 -0400 (0:00:00.107)       0:06:54.648 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Saturday 02 November 2024  19:00:23 -0400 (0:00:00.189)       0:06:54.838 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "blivet_output['mounts']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Saturday 02 November 2024  19:00:24 -0400 (0:00:00.168)       0:06:55.006 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730587881.7952998,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1730587879.2202723,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 104857860,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1730587879.2212098,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "3381617552",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Saturday 02 November 2024  19:00:24 -0400 (0:00:00.524)       0:06:55.530 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Saturday 02 November 2024  19:00:24 -0400 (0:00:00.061)       0:06:55.592 ***** 
ok: [managed-node2]

TASK [Verify role results] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:229
Saturday 02 November 2024  19:00:26 -0400 (0:00:01.317)       0:06:56.909 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node2

TASK [Print out pool information] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Saturday 02 November 2024  19:00:26 -0400 (0:00:00.205)       0:06:57.114 ***** 
ok: [managed-node2] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "absent",
            "type": "stratis",
            "volumes": [
                {
                    "_device": "",
                    "_mount_id": "",
                    "_raw_device": "",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "absent",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "stratis",
                    "vdo_pool_size": null
                }
            ]
        }
    ]
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Saturday 02 November 2024  19:00:26 -0400 (0:00:00.205)       0:06:57.320 ***** 
skipping: [managed-node2] => {
    "false_condition": "_storage_volumes_list | length > 0"
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Saturday 02 November 2024  19:00:26 -0400 (0:00:00.129)       0:06:57.449 ***** 
ok: [managed-node2] => {
    "changed": false,
    "info": {
        "/dev/sda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdb": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda1",
            "size": "250G",
            "type": "partition",
            "uuid": "d6f52a16-d77d-4c0d-9841-76b856654401"
        }
    }
}
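
Every member disk now reports an empty fstype and uuid, confirming the stratis signatures were wiped; only /dev/xvda1 (the root filesystem) carries a format. A hedged spot-check from a shell on the node:

    # blkid -p probes the device directly and exits non-zero for a wiped disk
    blkid -p /dev/sda || echo "no signature on /dev/sda"
    lsblk -f /dev/sda /dev/sdb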

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Saturday 02 November 2024  19:00:27 -0400 (0:00:00.510)       0:06:57.959 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.003458",
    "end": "2024-11-02 19:00:27.358148",
    "rc": 0,
    "start": "2024-11-02 19:00:27.354690"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Wed Oct 30 09:04:39 2024
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=d6f52a16-d77d-4c0d-9841-76b856654401 /                       xfs     defaults        0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
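
The leading "# system_role:storage" line is the fingerprint maintained by the "Add fingerprint to /etc/fstab if present" task above; apart from the NFS entries that predate the test, only the root filesystem remains, so no stale /opt/test1 mount was left behind. A hedged sketch of that check:

    grep -c '^# system_role:storage' /etc/fstab      # -> 1
    grep /opt/test1 /etc/fstab || echo "no stale mount entry"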

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Saturday 02 November 2024  19:00:27 -0400 (0:00:00.402)       0:06:58.362 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.002898",
    "end": "2024-11-02 19:00:27.844153",
    "failed_when_result": false,
    "rc": 0,
    "start": "2024-11-02 19:00:27.841255"
}

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Saturday 02 November 2024  19:00:27 -0400 (0:00:00.511)       0:06:58.874 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node2 => (item={'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'absent', 'type': 'stratis', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '', '_raw_device': '', '_mount_id': ''}]})

TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Saturday 02 November 2024  19:00:28 -0400 (0:00:00.242)       0:06:59.116 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Saturday 02 November 2024  19:00:28 -0400 (0:00:00.151)       0:06:59.268 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Saturday 02 November 2024  19:00:28 -0400 (0:00:00.140)       0:06:59.409 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify pool subset] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34
Saturday 02 November 2024  19:00:28 -0400 (0:00:00.110)       0:06:59.520 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node2 => (item=members)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node2 => (item=volumes)

TASK [Set test variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2
Saturday 02 November 2024  19:00:28 -0400 (0:00:00.296)       0:06:59.816 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Get the canonical device path for each member device] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8
Saturday 02 November 2024  19:00:28 -0400 (0:00:00.073)       0:06:59.889 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Set pvs lvm length] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17
Saturday 02 November 2024  19:00:29 -0400 (0:00:00.057)       0:06:59.947 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set pool pvs] ************************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22
Saturday 02 November 2024  19:00:29 -0400 (0:00:00.069)       0:07:00.016 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify PV count] *********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27
Saturday 02 November 2024  19:00:29 -0400 (0:00:00.069)       0:07:00.086 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36
Saturday 02 November 2024  19:00:29 -0400 (0:00:00.070)       0:07:00.156 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41
Saturday 02 November 2024  19:00:29 -0400 (0:00:00.069)       0:07:00.226 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and not storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46
Saturday 02 November 2024  19:00:29 -0400 (0:00:00.074)       0:07:00.301 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level",
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:51
Saturday 02 November 2024  19:00:29 -0400 (0:00:00.082)       0:07:00.383 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:64
Saturday 02 November 2024  19:00:29 -0400 (0:00:00.058)       0:07:00.442 ***** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0
}

STDOUT:


** (process:205563): WARNING **: 19:00:29.809: failed to load module nvme: libbd_nvme.so.2: cannot open shared object file: No such file or directory
True



STDERR:

OpenSSH_8.7p1, OpenSSL 3.2.2 4 Jun 2024
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.42.118 originally 10.31.42.118
debug2: match not found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: configuration requests final Match pass
debug2: resolve_canonicalize: hostname 10.31.42.118 is address
debug1: re-parsing configuration
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.42.118 originally 10.31.42.118
debug2: match found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: auto-mux: Trying existing master
debug2: fd 3 setting O_NONBLOCK
debug2: mux_client_hello_exchange: master version 4
debug1: mux_client_request_session: master session id: 2
debug2: Received exit status from master 0
Shared connection to 10.31.42.118 closed.
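
This task runs a short Python probe on the managed node; the GLib warning about libbd_nvme.so.2 is benign (the optional libblockdev NVMe plugin is simply not installed) and the probe still printed True, so blivet's grow-to-fill support is present. A hedged sketch of such a probe (the attribute name is an assumption; the test's actual expression may differ):

    # Hedged capability probe; "grow_to_fill" as an attribute name is an assumption
    python3 -c 'import blivet.formats.lvmpv as lvmpv; print(hasattr(lvmpv.LVMPhysicalVolume, "grow_to_fill"))'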


TASK [Verify that PVs fill the whole devices when they should] *****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:73
Saturday 02 November 2024  19:00:30 -0400 (0:00:00.498)       0:07:00.940 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:83
Saturday 02 November 2024  19:00:30 -0400 (0:00:00.150)       0:07:01.091 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node2

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Saturday 02 November 2024  19:00:30 -0400 (0:00:00.168)       0:07:01.260 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Saturday 02 November 2024  19:00:30 -0400 (0:00:00.223)       0:07:01.483 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Saturday 02 November 2024  19:00:30 -0400 (0:00:00.073)       0:07:01.557 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Saturday 02 November 2024  19:00:30 -0400 (0:00:00.083)       0:07:01.641 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Saturday 02 November 2024  19:00:30 -0400 (0:00:00.112)       0:07:01.753 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Saturday 02 November 2024  19:00:30 -0400 (0:00:00.138)       0:07:01.892 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Saturday 02 November 2024  19:00:31 -0400 (0:00:00.122)       0:07:02.015 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55
Saturday 02 November 2024  19:00:31 -0400 (0:00:00.125)       0:07:02.140 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64
Saturday 02 November 2024  19:00:31 -0400 (0:00:00.125)       0:07:02.266 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74
Saturday 02 November 2024  19:00:31 -0400 (0:00:00.118)       0:07:02.384 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83
Saturday 02 November 2024  19:00:31 -0400 (0:00:00.135)       0:07:02.520 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}
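
Test variables are cleared by assigning null through set_fact, exactly as the ansible_facts payload above shows; a sketch of the underlying task, reconstructed from that payload:

- name: Reset variables used by tests
  ansible.builtin.set_fact:
    storage_test_md_active_devices_re: null
    storage_test_md_chunk_size_re: null
    storage_test_md_metadata_version_re: null
    storage_test_md_spare_devices_re: null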

TASK [Check LVM RAID] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:86
Saturday 02 November 2024  19:00:31 -0400 (0:00:00.112)       0:07:02.633 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node2

TASK [Validate pool member LVM RAID settings] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Saturday 02 November 2024  19:00:31 -0400 (0:00:00.168)       0:07:02.802 ***** 
skipping: [managed-node2] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '', '_raw_device': '', '_mount_id': ''})  => {
    "ansible_loop_var": "storage_test_lvmraid_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_lvmraid_volume": {
        "_device": "",
        "_mount_id": "",
        "_raw_device": "",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "absent",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check Thin Pools] ********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:89
Saturday 02 November 2024  19:00:31 -0400 (0:00:00.084)       0:07:02.886 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node2

TASK [Validate pool member thinpool settings] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2
Saturday 02 November 2024  19:00:32 -0400 (0:00:00.229)       0:07:03.115 ***** 
skipping: [managed-node2] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '', '_raw_device': '', '_mount_id': ''})  => {
    "ansible_loop_var": "storage_test_thin_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_thin_volume": {
        "_device": "",
        "_mount_id": "",
        "_raw_device": "",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "absent",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check member encryption] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:92
Saturday 02 November 2024  19:00:32 -0400 (0:00:00.085)       0:07:03.200 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node2

TASK [Set test variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Saturday 02 November 2024  19:00:32 -0400 (0:00:00.145)       0:07:03.345 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Saturday 02 November 2024  19:00:32 -0400 (0:00:00.107)       0:07:03.453 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Saturday 02 November 2024  19:00:32 -0400 (0:00:00.086)       0:07:03.540 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Saturday 02 November 2024  19:00:32 -0400 (0:00:00.060)       0:07:03.600 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:95
Saturday 02 November 2024  19:00:32 -0400 (0:00:00.074)       0:07:03.675 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node2

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Saturday 02 November 2024  19:00:32 -0400 (0:00:00.102)       0:07:03.777 ***** 
skipping: [managed-node2] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '', '_raw_device': '', '_mount_id': ''})  => {
    "ansible_loop_var": "storage_test_vdo_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_vdo_volume": {
        "_device": "",
        "_mount_id": "",
        "_raw_device": "",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "absent",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped
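
These per-volume skips (here for VDO, and likewise for the LVM RAID and thin pool checks above) suggest a loop over the pool's volumes with a custom loop_var and an LVM-only guard; a sketch under that assumption — the loop variable and when expression come from the log, while the included file name is hypothetical:

- name: Validate pool member VDO settings
  ansible.builtin.include_tasks: verify-pool-volume-vdo.yml  # hypothetical file name
  loop: "{{ storage_test_pool.volumes }}"
  loop_control:
    loop_var: storage_test_vdo_volume
  when: storage_test_pool.type == 'lvm'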

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:98
Saturday 02 November 2024  19:00:32 -0400 (0:00:00.064)       0:07:03.842 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node2

TASK [Run 'stratis report'] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Saturday 02 November 2024  19:00:33 -0400 (0:00:00.154)       0:07:03.996 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "stratis",
        "report"
    ],
    "delta": "0:00:00.339182",
    "end": "2024-11-02 19:00:33.809577",
    "rc": 0,
    "start": "2024-11-02 19:00:33.470395"
}

STDOUT:

{
    "name_to_pool_uuid_map": {},
    "partially_constructed_pools": [],
    "path_to_ids_map": {},
    "pools": [],
    "stopped_pools": []
}

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:11
Saturday 02 November 2024  19:00:33 -0400 (0:00:00.855)       0:07:04.851 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_stratis_pool_info": {
            "name_to_pool_uuid_map": {},
            "partially_constructed_pools": [],
            "path_to_ids_map": {},
            "pools": [],
            "stopped_pools": []
        }
    },
    "changed": false
}
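
The report is captured with a command task (changed_when: false is implied by the "changed": false result above) and parsed into a fact with the from_json filter; a sketch of the pair, reusing the registered-variable and fact names that appear in this log:

- name: Run 'stratis report'
  ansible.builtin.command: stratis report
  register: storage_test_stratis_report
  changed_when: false

- name: Get information about Stratis
  ansible.builtin.set_fact:
    _stratis_pool_info: "{{ storage_test_stratis_report.stdout | from_json }}"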

TASK [Verify that the pool was created] ****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Saturday 02 November 2024  19:00:34 -0400 (0:00:00.249)       0:07:05.101 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:25
Saturday 02 November 2024  19:00:34 -0400 (0:00:00.085)       0:07:05.187 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:34
Saturday 02 November 2024  19:00:34 -0400 (0:00:00.083)       0:07:05.270 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:44
Saturday 02 November 2024  19:00:34 -0400 (0:00:00.117)       0:07:05.388 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:101
Saturday 02 November 2024  19:00:34 -0400 (0:00:00.111)       0:07:05.499 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Saturday 02 November 2024  19:00:34 -0400 (0:00:00.091)       0:07:05.590 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '', '_raw_device': '', '_mount_id': ''})

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Saturday 02 November 2024  19:00:34 -0400 (0:00:00.218)       0:07:05.809 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_volume_present": false,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Saturday 02 November 2024  19:00:35 -0400 (0:00:00.162)       0:07:05.971 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node2 => (item=mount)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node2 => (item=fstab)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node2 => (item=fs)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node2 => (item=device)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node2 => (item=encryption)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node2 => (item=md)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node2 => (item=size)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node2 => (item=cache)
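
The fan-out above comes from including a per-subset task file in a loop; a sketch assuming the loop runs over the _storage_volume_tests list set two tasks earlier (the file names and loop variable are visible in the log):

- name: Run test verify for {{ storage_test_volume_subset }}
  ansible.builtin.include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
  loop: "{{ _storage_volume_tests }}"
  loop_control:
    loop_var: storage_test_volume_subset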

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Saturday 02 November 2024  19:00:35 -0400 (0:00:00.351)       0:07:06.323 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_device_path": ""
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Saturday 02 November 2024  19:00:35 -0400 (0:00:00.088)       0:07:06.412 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "/opt/test1",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Saturday 02 November 2024  19:00:35 -0400 (0:00:00.171)       0:07:06.583 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Saturday 02 November 2024  19:00:35 -0400 (0:00:00.085)       0:07:06.669 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Saturday 02 November 2024  19:00:35 -0400 (0:00:00.215)       0:07:06.885 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Saturday 02 November 2024  19:00:36 -0400 (0:00:00.079)       0:07:06.964 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Saturday 02 November 2024  19:00:36 -0400 (0:00:00.109)       0:07:07.074 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode",
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Saturday 02 November 2024  19:00:36 -0400 (0:00:00.121)       0:07:07.195 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Saturday 02 November 2024  19:00:36 -0400 (0:00:00.101)       0:07:07.297 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Saturday 02 November 2024  19:00:36 -0400 (0:00:00.099)       0:07:07.396 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Saturday 02 November 2024  19:00:36 -0400 (0:00:00.080)       0:07:07.477 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Saturday 02 November 2024  19:00:36 -0400 (0:00:00.079)       0:07:07.556 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "0",
        "storage_test_fstab_expected_mount_options_matches": "0",
        "storage_test_fstab_expected_mount_point_matches": "0",
        "storage_test_fstab_id_matches": [],
        "storage_test_fstab_mount_options_matches": [],
        "storage_test_fstab_mount_point_matches": []
    },
    "changed": false
}

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Saturday 02 November 2024  19:00:36 -0400 (0:00:00.159)       0:07:07.716 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Saturday 02 November 2024  19:00:36 -0400 (0:00:00.125)       0:07:07.842 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed
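
An "All assertions passed" message is the standard output of ansible.builtin.assert; a sketch of what this fstab mount point check could look like, comparing the match list against the expected count set in the fstab-variables task above (the exact expression is an assumption):

- name: Verify the fstab mount point
  ansible.builtin.assert:
    that:
      - storage_test_fstab_mount_point_matches | length == storage_test_fstab_expected_mount_point_matches | int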

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Saturday 02 November 2024  19:00:37 -0400 (0:00:00.124)       0:07:07.966 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__storage_verify_mount_options | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Saturday 02 November 2024  19:00:37 -0400 (0:00:00.150)       0:07:08.116 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:51
Saturday 02 November 2024  19:00:37 -0400 (0:00:00.083)       0:07:08.200 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Saturday 02 November 2024  19:00:37 -0400 (0:00:00.094)       0:07:08.295 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type != \"stratis\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Saturday 02 November 2024  19:00:37 -0400 (0:00:00.069)       0:07:08.365 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type != \"stratis\"",
    "skip_reason": "Conditional result was False"
}

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Saturday 02 November 2024  19:00:37 -0400 (0:00:00.165)       0:07:08.530 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Saturday 02 November 2024  19:00:38 -0400 (0:00:00.525)       0:07:09.055 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present or storage_test_volume.type == 'disk'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Saturday 02 November 2024  19:00:38 -0400 (0:00:00.111)       0:07:09.167 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Saturday 02 November 2024  19:00:38 -0400 (0:00:00.103)       0:07:09.270 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Saturday 02 November 2024  19:00:38 -0400 (0:00:00.144)       0:07:09.414 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "st_volume_type": "stratis"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Saturday 02 November 2024  19:00:38 -0400 (0:00:00.126)       0:07:09.541 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"raid\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Saturday 02 November 2024  19:00:38 -0400 (0:00:00.131)       0:07:09.673 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Saturday 02 November 2024  19:00:38 -0400 (0:00:00.090)       0:07:09.764 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Saturday 02 November 2024  19:00:38 -0400 (0:00:00.070)       0:07:09.834 ***** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: cryptsetup
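
The "Nothing to do" result with the lsrpackages marker indicates a package-module call that found cryptsetup already installed; a minimal sketch of such a task (the marker itself is added by the test tooling, not by the module):

- name: Ensure cryptsetup is present
  ansible.builtin.package:
    name: cryptsetup
    state: present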

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Saturday 02 November 2024  19:00:40 -0400 (0:00:01.444)       0:07:11.279 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Saturday 02 November 2024  19:00:40 -0400 (0:00:00.138)       0:07:11.418 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Saturday 02 November 2024  19:00:40 -0400 (0:00:00.110)       0:07:11.528 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Saturday 02 November 2024  19:00:40 -0400 (0:00:00.112)       0:07:11.641 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Saturday 02 November 2024  19:00:40 -0400 (0:00:00.133)       0:07:11.774 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Saturday 02 November 2024  19:00:41 -0400 (0:00:00.129)       0:07:11.904 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Saturday 02 November 2024  19:00:41 -0400 (0:00:00.343)       0:07:12.247 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Saturday 02 November 2024  19:00:41 -0400 (0:00:00.103)       0:07:12.351 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Saturday 02 November 2024  19:00:41 -0400 (0:00:00.076)       0:07:12.427 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Saturday 02 November 2024  19:00:41 -0400 (0:00:00.114)       0:07:12.542 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed
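
The crypttab check plausibly asserts that the number of matching /etc/crypttab entries equals the expected count set in the test-variables task above; a sketch under that assumption, using the fact names from this log:

- name: Check for /etc/crypttab entry
  ansible.builtin.assert:
    that:
      - _storage_test_crypttab_entries | length == _storage_test_expected_crypttab_entries | int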

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Saturday 02 November 2024  19:00:41 -0400 (0:00:00.115)       0:07:12.657 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Saturday 02 November 2024  19:00:41 -0400 (0:00:00.106)       0:07:12.764 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Saturday 02 November 2024  19:00:41 -0400 (0:00:00.103)       0:07:12.868 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Saturday 02 November 2024  19:00:42 -0400 (0:00:00.100)       0:07:12.968 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Saturday 02 November 2024  19:00:42 -0400 (0:00:00.075)       0:07:13.044 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Saturday 02 November 2024  19:00:42 -0400 (0:00:00.072)       0:07:13.117 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Saturday 02 November 2024  19:00:42 -0400 (0:00:00.080)       0:07:13.197 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Saturday 02 November 2024  19:00:42 -0400 (0:00:00.070)       0:07:13.267 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Saturday 02 November 2024  19:00:42 -0400 (0:00:00.070)       0:07:13.338 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Saturday 02 November 2024  19:00:42 -0400 (0:00:00.073)       0:07:13.412 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Saturday 02 November 2024  19:00:42 -0400 (0:00:00.166)       0:07:13.578 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Saturday 02 November 2024  19:00:42 -0400 (0:00:00.070)       0:07:13.649 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Saturday 02 November 2024  19:00:42 -0400 (0:00:00.071)       0:07:13.721 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Saturday 02 November 2024  19:00:42 -0400 (0:00:00.068)       0:07:13.789 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Saturday 02 November 2024  19:00:42 -0400 (0:00:00.048)       0:07:13.837 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Saturday 02 November 2024  19:00:43 -0400 (0:00:00.077)       0:07:13.915 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Saturday 02 November 2024  19:00:43 -0400 (0:00:00.065)       0:07:13.981 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Saturday 02 November 2024  19:00:43 -0400 (0:00:00.059)       0:07:14.040 ***** 
ok: [managed-node2] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}
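
Printing a variable with debug's var parameter yields exactly this "VARIABLE IS NOT DEFINED!" placeholder when the variable is unset; a sketch of the task behind the output above:

- name: Show expected size
  ansible.builtin.debug:
    var: storage_test_expected_size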

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Saturday 02 November 2024  19:00:43 -0400 (0:00:00.044)       0:07:14.085 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Saturday 02 November 2024  19:00:43 -0400 (0:00:00.077)       0:07:14.162 ***** 
skipping: [managed-node2] => {
    "false_condition": "_storage_test_volume_present | bool"
}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Saturday 02 November 2024  19:00:43 -0400 (0:00:00.127)       0:07:14.289 ***** 
skipping: [managed-node2] => {
    "false_condition": "_storage_test_volume_present | bool"
}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Saturday 02 November 2024  19:00:43 -0400 (0:00:00.099)       0:07:14.389 ***** 
skipping: [managed-node2] => {
    "false_condition": "_storage_test_volume_present | bool"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Saturday 02 November 2024  19:00:43 -0400 (0:00:00.100)       0:07:14.490 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Saturday 02 November 2024  19:00:43 -0400 (0:00:00.096)       0:07:14.587 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Saturday 02 November 2024  19:00:43 -0400 (0:00:00.073)       0:07:14.660 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Saturday 02 November 2024  19:00:43 -0400 (0:00:00.144)       0:07:14.804 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Saturday 02 November 2024  19:00:43 -0400 (0:00:00.065)       0:07:14.870 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Saturday 02 November 2024  19:00:44 -0400 (0:00:00.071)       0:07:14.941 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Saturday 02 November 2024  19:00:44 -0400 (0:00:00.071)       0:07:15.013 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Saturday 02 November 2024  19:00:44 -0400 (0:00:00.067)       0:07:15.080 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Saturday 02 November 2024  19:00:44 -0400 (0:00:00.046)       0:07:15.127 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Saturday 02 November 2024  19:00:44 -0400 (0:00:00.055)       0:07:15.182 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Saturday 02 November 2024  19:00:44 -0400 (0:00:00.049)       0:07:15.231 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Saturday 02 November 2024  19:00:44 -0400 (0:00:00.053)       0:07:15.285 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Saturday 02 November 2024  19:00:44 -0400 (0:00:00.041)       0:07:15.326 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Saturday 02 November 2024  19:00:44 -0400 (0:00:00.041)       0:07:15.367 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Saturday 02 November 2024  19:00:44 -0400 (0:00:00.046)       0:07:15.414 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Saturday 02 November 2024  19:00:44 -0400 (0:00:00.062)       0:07:15.476 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Saturday 02 November 2024  19:00:44 -0400 (0:00:00.060)       0:07:15.537 ***** 
ok: [managed-node2] => {
    "storage_test_actual_size": {
        "changed": false,
        "false_condition": "_storage_test_volume_present | bool",
        "skip_reason": "Conditional result was False",
        "skipped": true
    }
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Saturday 02 November 2024  19:00:44 -0400 (0:00:00.057)       0:07:15.594 ***** 
ok: [managed-node2] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}
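
The "VARIABLE IS NOT DEFINED!" message above is ansible.builtin.debug printing a variable that was never set, because every task that would have computed it was skipped. A sketch of how such a task can stay quiet on the skipped path, assuming only the variable name shown in the output:

    - name: Show expected size
      ansible.builtin.debug:
        var: storage_test_expected_size
      when: storage_test_expected_size is defined  # skip cleanly instead of printing the undefined marker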

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Saturday 02 November 2024  19:00:44 -0400 (0:00:00.126)       0:07:15.720 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Saturday 02 November 2024  19:00:44 -0400 (0:00:00.063)       0:07:15.784 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Saturday 02 November 2024  19:00:44 -0400 (0:00:00.042)       0:07:15.827 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Saturday 02 November 2024  19:00:44 -0400 (0:00:00.045)       0:07:15.873 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Saturday 02 November 2024  19:00:45 -0400 (0:00:00.068)       0:07:15.941 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Saturday 02 November 2024  19:00:45 -0400 (0:00:00.072)       0:07:16.013 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Saturday 02 November 2024  19:00:45 -0400 (0:00:00.089)       0:07:16.103 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Saturday 02 November 2024  19:00:45 -0400 (0:00:00.094)       0:07:16.197 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Saturday 02 November 2024  19:00:45 -0400 (0:00:00.122)       0:07:16.319 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Saturday 02 November 2024  19:00:45 -0400 (0:00:00.125)       0:07:16.444 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Saturday 02 November 2024  19:00:45 -0400 (0:00:00.113)       0:07:16.558 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Setup Tang server on localhost for testing] ******************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:232
Saturday 02 November 2024  19:00:45 -0400 (0:00:00.101)       0:07:16.660 ***** 
included: fedora.linux_system_roles.nbde_server for managed-node2
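
The test play pulls in the nbde_server role here to stand up a Tang server for the NBDE/Clevis pool tests that follow. A minimal sketch of such an invocation, assuming the role exposes the port through nbde_server_port and using the port 7500 seen in the SELinux task further down; the actual vars in tests_stratis.yml may differ:

    - name: Setup Tang server on localhost for testing
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.nbde_server
      vars:
        nbde_server_port: 7500  # assumed variable name; port taken from the seport task below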

TASK [fedora.linux_system_roles.nbde_server : Set version specific variables] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main.yml:6
Saturday 02 November 2024  19:00:45 -0400 (0:00:00.236)       0:07:16.896 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml for managed-node2

TASK [fedora.linux_system_roles.nbde_server : Ensure ansible_facts used by role] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:2
Saturday 02 November 2024  19:00:46 -0400 (0:00:00.295)       0:07:17.192 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__nbde_server_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.nbde_server : Check if system is ostree] *******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:10
Saturday 02 November 2024  19:00:46 -0400 (0:00:00.133)       0:07:17.325 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [fedora.linux_system_roles.nbde_server : Set flag to indicate system is ostree] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:15
Saturday 02 November 2024  19:00:46 -0400 (0:00:00.448)       0:07:17.774 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "__nbde_server_is_ostree": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.nbde_server : Set platform/version specific variables] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:19
Saturday 02 November 2024  19:00:46 -0400 (0:00:00.085)       0:07:17.860 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "__nbde_server_cachedir": "/var/cache/tang",
        "__nbde_server_group": "tang",
        "__nbde_server_keydir": "/var/db/tang",
        "__nbde_server_keygen": "/usr/libexec/tangd-keygen",
        "__nbde_server_packages": [
            "tang"
        ],
        "__nbde_server_services": [
            "tangd.socket"
        ],
        "__nbde_server_update": "/usr/libexec/tangd-update",
        "__nbde_server_user": "tang"
    },
    "ansible_included_var_files": [
        "/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/nbde_server/vars/default.yml"
    ],
    "changed": false
}

TASK [fedora.linux_system_roles.nbde_server : Include the appropriate provider tasks] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main.yml:9
Saturday 02 November 2024  19:00:47 -0400 (0:00:00.119)       0:07:17.980 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml for managed-node2

TASK [fedora.linux_system_roles.nbde_server : Ensure tang is installed] ********
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:2
Saturday 02 November 2024  19:00:47 -0400 (0:00:00.152)       0:07:18.132 ***** 
changed: [managed-node2] => {
    "changed": true,
    "rc": 0,
    "results": [
        "Installed: http-parser-2.9.4-6.el9.x86_64",
        "Installed: tang-14-2.el9.x86_64"
    ]
}
lsrpackages: tang
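
The package step above resolved to installing tang (pulling in http-parser as a dependency). A standalone equivalent of that step, sketched with the generic package module:

    - name: Ensure tang is installed
      ansible.builtin.package:
        name: tang
        state: present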

TASK [fedora.linux_system_roles.nbde_server : Ensure keys are rotated] *********
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:8
Saturday 02 November 2024  19:00:49 -0400 (0:00:02.639)       0:07:20.771 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "nbde_server_rotate_keys | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.nbde_server : Ensure we have keys] *************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:17
Saturday 02 November 2024  19:00:50 -0400 (0:00:00.172)       0:07:20.944 ***** 
changed: [managed-node2] => {
    "arguments": {
        "cachedir": "/var/cache/tang",
        "force": false,
        "keydir": "/var/db/tang",
        "keygen": "/usr/libexec/tangd-keygen",
        "keys_to_deploy_dir": null,
        "state": "keys-created",
        "update": "/usr/libexec/tangd-update"
    },
    "changed": true,
    "state": "keys-created"
}
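
The keys-created result above comes from the role's own key-management module; its arguments echo the platform vars set earlier (keygen, keydir, cachedir). A rough command-level equivalent using the paths from the output, not what the role actually runs:

    - name: Generate tang keys in the key directory (rough equivalent)
      ansible.builtin.command: /usr/libexec/tangd-keygen /var/db/tang
      changed_when: true  # the real module reports change only when keys are actually created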

TASK [fedora.linux_system_roles.nbde_server : Perform key management (fetch/deploy) tasks] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:26
Saturday 02 November 2024  19:00:50 -0400 (0:00:00.640)       0:07:21.584 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "(nbde_server_fetch_keys | bool) or (nbde_server_deploy_keys | bool)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.nbde_server : Manage firewall and SELinux for port] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:30
Saturday 02 November 2024  19:00:50 -0400 (0:00:00.130)       0:07:21.715 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml for managed-node2

TASK [Ensure tang port is labeled tangd_port_t for SELinux] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:2
Saturday 02 November 2024  19:00:50 -0400 (0:00:00.140)       0:07:21.855 ***** 
redirecting (type: modules) ansible.builtin.selinux to ansible.posix.selinux
redirecting (type: modules) ansible.builtin.selinux to ansible.posix.selinux
redirecting (type: modules) ansible.builtin.seboolean to ansible.posix.seboolean
included: fedora.linux_system_roles.selinux for managed-node2
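
tangd-custom-port.yml delegates the port labeling to the selinux role. A sketch of that hand-off, with the port mapping reconstructed from the loop item printed below; selinux_ports is the role's documented interface, but the exact task wording is assumed:

    - name: Ensure tang port is labeled tangd_port_t for SELinux
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.selinux
      vars:
        selinux_ports:
          - ports: 7500
            proto: tcp
            setype: tangd_port_t
            state: present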

TASK [fedora.linux_system_roles.selinux : Set ansible_facts required by role and install packages] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:2
Saturday 02 November 2024  19:00:51 -0400 (0:00:00.310)       0:07:22.166 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml for managed-node2

TASK [fedora.linux_system_roles.selinux : Ensure ansible_facts used by role] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml:2
Saturday 02 November 2024  19:00:51 -0400 (0:00:00.142)       0:07:22.308 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__selinux_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.selinux : Ensure SELinux packages] *************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml:7
Saturday 02 November 2024  19:00:51 -0400 (0:00:00.120)       0:07:22.429 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml for managed-node2

TASK [fedora.linux_system_roles.selinux : Check if system is ostree] ***********
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:5
Saturday 02 November 2024  19:00:51 -0400 (0:00:00.144)       0:07:22.574 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [fedora.linux_system_roles.selinux : Set flag to indicate system is ostree] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:10
Saturday 02 November 2024  19:00:52 -0400 (0:00:00.444)       0:07:23.018 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "__selinux_is_ostree": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.selinux : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:17
Saturday 02 November 2024  19:00:52 -0400 (0:00:00.095)       0:07:23.114 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [fedora.linux_system_roles.selinux : Set flag if transactional-update exists] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:22
Saturday 02 November 2024  19:00:52 -0400 (0:00:00.495)       0:07:23.609 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "__selinux_is_transactional": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.selinux : Install SELinux python2 tools] *******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:26
Saturday 02 November 2024  19:00:52 -0400 (0:00:00.122)       0:07:23.732 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "ansible_python_version is version('3', '<')",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.selinux : Install SELinux python3 tools] *******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:35
Saturday 02 November 2024  19:00:52 -0400 (0:00:00.079)       0:07:23.811 ***** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: python3-libselinux python3-policycoreutils

TASK [fedora.linux_system_roles.selinux : Install SELinux python3 tools] *******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:46
Saturday 02 November 2024  19:00:54 -0400 (0:00:01.323)       0:07:25.134 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "ansible_os_family == \"Suse\"",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.selinux : Install SELinux tool semanage] *******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:58
Saturday 02 November 2024  19:00:54 -0400 (0:00:00.076)       0:07:25.211 ***** 
changed: [managed-node2] => {
    "changed": true,
    "rc": 0,
    "results": [
        "Installed: policycoreutils-python-utils-3.6-2.1.el9.noarch"
    ]
}
lsrpackages: policycoreutils-python-utils

TASK [fedora.linux_system_roles.selinux : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:72
Saturday 02 November 2024  19:00:56 -0400 (0:00:02.019)       0:07:27.231 ***** 
skipping: [managed-node2] => {
    "false_condition": "__selinux_is_transactional | d(false)"
}

TASK [fedora.linux_system_roles.selinux : Reboot transactional update systems] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:77
Saturday 02 November 2024  19:00:56 -0400 (0:00:00.100)       0:07:27.332 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__selinux_is_transactional | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.selinux : Fail if reboot is needed and not set] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:82
Saturday 02 November 2024  19:00:56 -0400 (0:00:00.102)       0:07:27.434 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__selinux_is_transactional | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.selinux : Refresh facts] ***********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:89
Saturday 02 November 2024  19:00:56 -0400 (0:00:00.104)       0:07:27.539 ***** 
ok: [managed-node2]

TASK [fedora.linux_system_roles.selinux : Set permanent SELinux state if enabled] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:5
Saturday 02 November 2024  19:00:57 -0400 (0:00:01.033)       0:07:28.572 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "ansible_selinux.status == \"enabled\" and (selinux_state or selinux_policy)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.selinux : Set permanent SELinux state if disabled] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:13
Saturday 02 November 2024  19:00:57 -0400 (0:00:00.070)       0:07:28.643 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "ansible_selinux.status == \"disabled\" and selinux_state",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.selinux : Set selinux_reboot_required] *********
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:21
Saturday 02 November 2024  19:00:57 -0400 (0:00:00.090)       0:07:28.734 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "selinux_reboot_required": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.selinux : Fail if reboot is required] **********
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:25
Saturday 02 November 2024  19:00:57 -0400 (0:00:00.145)       0:07:28.879 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "selinux_reboot_required",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.selinux : Warn if SELinux is disabled] *********
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:30
Saturday 02 November 2024  19:00:58 -0400 (0:00:00.114)       0:07:28.994 ***** 
skipping: [managed-node2] => {
    "false_condition": "ansible_selinux.status == \"disabled\""
}

TASK [fedora.linux_system_roles.selinux : Drop all local modifications] ********
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:35
Saturday 02 November 2024  19:00:58 -0400 (0:00:00.096)       0:07:29.091 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "selinux_all_purge | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.selinux : Purge all SELinux boolean local modifications] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:43
Saturday 02 November 2024  19:00:58 -0400 (0:00:00.156)       0:07:29.247 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "selinux_booleans_purge | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.selinux : Purge all SELinux file context local modifications] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:48
Saturday 02 November 2024  19:00:58 -0400 (0:00:00.174)       0:07:29.422 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "selinux_fcontexts_purge | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.selinux : Purge all SELinux port local modifications] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:53
Saturday 02 November 2024  19:00:58 -0400 (0:00:00.123)       0:07:29.545 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "selinux_ports_purge | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.selinux : Purge all SELinux login local modifications] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:58
Saturday 02 November 2024  19:00:58 -0400 (0:00:00.113)       0:07:29.659 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "selinux_logins_purge | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.selinux : Set SELinux booleans] ****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:63
Saturday 02 November 2024  19:00:58 -0400 (0:00:00.100)       0:07:29.759 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.selinux : Set SELinux file contexts] ***********
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:74
Saturday 02 November 2024  19:00:59 -0400 (0:00:00.213)       0:07:29.972 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.selinux : Set an SELinux label on a port] ******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:87
Saturday 02 November 2024  19:00:59 -0400 (0:00:00.086)       0:07:30.058 ***** 
changed: [managed-node2] => (item={'ports': 7500, 'proto': 'tcp', 'setype': 'tangd_port_t', 'state': 'present', 'local': True}) => {
    "__selinux_item": {
        "local": true,
        "ports": 7500,
        "proto": "tcp",
        "setype": "tangd_port_t",
        "state": "present"
    },
    "ansible_loop_var": "__selinux_item",
    "changed": true,
    "ports": [
        "7500"
    ],
    "proto": "tcp",
    "setype": "tangd_port_t",
    "state": "present"
}
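
The changed result above maps TCP port 7500 to tangd_port_t so tangd.socket can listen on the custom port. Outside the role, the same mapping can be expressed with community.general.seport (a sketch; not necessarily the module the role calls internally):

    - name: Label TCP port 7500 for tang
      community.general.seport:
        ports: 7500
        proto: tcp
        setype: tangd_port_t
        state: present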

TASK [fedora.linux_system_roles.selinux : Set linux user to SELinux user mapping] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:99
Saturday 02 November 2024  19:01:01 -0400 (0:00:02.401)       0:07:32.460 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.selinux : Get SELinux modules facts] ***********
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:112
Saturday 02 November 2024  19:01:01 -0400 (0:00:00.036)       0:07:32.497 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "selinux_checksums": true,
        "selinux_installed_modules": {
            "abrt": {
                "100": {
                    "checksum": "sha256:b51e3b6f704ac00c5bba8ac275f854f6d53a5e3a4eb5c5cbbc4b11004feca510",
                    "enabled": 1
                }
            },
            "accountsd": {
                "100": {
                    "checksum": "sha256:00a0d1af3601693070f723e0deaea1ee28734bde5ef4895843a1ba59cd7ceaa5",
                    "enabled": 1
                }
            },
            "acct": {
                "100": {
                    "checksum": "sha256:9cde24137141f8c2a8fcee6dd15537f0f54198da14360a30655a0606e4ba2818",
                    "enabled": 1
                }
            },
            "afs": {
                "100": {
                    "checksum": "sha256:93c409c37386aacafd2d288f053c13e2a092696358aa991db322507eb9f4838b",
                    "enabled": 1
                }
            },
            "afterburn": {
                "100": {
                    "checksum": "sha256:8c0ef816b475cee987383fac53e50be5350b237033c893df8e267579e35eb726",
                    "enabled": 1
                }
            },
            "aiccu": {
                "100": {
                    "checksum": "sha256:1433f430bc89a874d46961861e1da8520158d6794a0b778e86b88fe13f71798a",
                    "enabled": 1
                }
            },
            "aide": {
                "100": {
                    "checksum": "sha256:35a074f33aee562b559a5cb76637dc4602a70c70f573ec4efe3a4274cb3e3a75",
                    "enabled": 1
                }
            },
            "ajaxterm": {
                "100": {
                    "checksum": "sha256:a2afd368cdffc8ec7c2dccd978bda04ec9bcec8cc1d133521e526dbe196c5f90",
                    "enabled": 1
                }
            },
            "alsa": {
                "100": {
                    "checksum": "sha256:be7eaf8e5da7b69d7d41422771e7e3692d41148f3e3c4e78b7b03d8117ddb2c6",
                    "enabled": 1
                }
            },
            "amanda": {
                "100": {
                    "checksum": "sha256:84287ea4da0ed7b94b142d7b494666232e7a25209471ff689c7806866ce93f92",
                    "enabled": 1
                }
            },
            "amtu": {
                "100": {
                    "checksum": "sha256:ee038524634638e4842fd478cf60861765476498940c7516ced038c6a9161282",
                    "enabled": 1
                }
            },
            "anaconda": {
                "100": {
                    "checksum": "sha256:4292aa3daa6a027612e80073403b822d55297b812dd9455e2842d8a89a563e25",
                    "enabled": 1
                }
            },
            "antivirus": {
                "100": {
                    "checksum": "sha256:75e3f196fca58dc489cf92c4fa8be3decec0bcedcb2008cb7a83df5bc00d5850",
                    "enabled": 1
                }
            },
            "apache": {
                "100": {
                    "checksum": "sha256:60e843a42569cd6efeb7da329e9593960e752c111dfa1df6874fe22f26d0a3a1",
                    "enabled": 1
                }
            },
            "apcupsd": {
                "100": {
                    "checksum": "sha256:a00445c04d0003fce006ff5529e988671c09286372f803b2545bebc5302b71f4",
                    "enabled": 1
                }
            },
            "apm": {
                "100": {
                    "checksum": "sha256:083e097b9b3ac106fb9e9b0fc7b5a830b70a85a321ee913e0a3ce5a262d13779",
                    "enabled": 1
                }
            },
            "application": {
                "100": {
                    "checksum": "sha256:46d9417dcf9bb31c077dc7ad25d8ac315fed23a2f0e69b00460c449534932765",
                    "enabled": 1
                }
            },
            "arpwatch": {
                "100": {
                    "checksum": "sha256:286406a4369a79a0e868d0e73699ee829431f534733e527bf91b6363486b00ed",
                    "enabled": 1
                }
            },
            "asterisk": {
                "100": {
                    "checksum": "sha256:3c093f028ebb0a01031a7a03233c24fb3ba41c693d3246b3275337289b20251c",
                    "enabled": 1
                }
            },
            "auditadm": {
                "100": {
                    "checksum": "sha256:6b85c025edaad2949d75c1b6ea59a88beb8ae28112bd756c1b4d12417fd262ba",
                    "enabled": 1
                }
            },
            "authconfig": {
                "100": {
                    "checksum": "sha256:be2114af9cfa429328d94fd44aeb34a5f94a6dab76deb20f3b9cea7182dd1343",
                    "enabled": 1
                }
            },
            "authlogin": {
                "100": {
                    "checksum": "sha256:61b6b0c427f5c86b16f77231ce3aa989c8ef7b40bcefef99d8f7518d82bd8bc9",
                    "enabled": 1
                }
            },
            "automount": {
                "100": {
                    "checksum": "sha256:7c1bc02c72f6a9b86f6cb6b4e8307428346706171510fb17f1e09bf1c7600d35",
                    "enabled": 1
                }
            },
            "avahi": {
                "100": {
                    "checksum": "sha256:ce150ead441c9ccbe062a9f7b868ff893eb427ff6a7bee500977699c42b89286",
                    "enabled": 1
                }
            },
            "awstats": {
                "100": {
                    "checksum": "sha256:70fa46fcee1c327fbb59d24b26bf92aeb641705bb6821d29ee9a8c213b5822b0",
                    "enabled": 1
                }
            },
            "bacula": {
                "100": {
                    "checksum": "sha256:7ff04b28e1684fa4f655051692015501aa0f0ad46edd79cadcdf7020fa2e66aa",
                    "enabled": 1
                }
            },
            "base": {
                "100": {
                    "checksum": "sha256:5f771ae506cadab52ba964b0218700f2ec4537102537b6af270eaea5dc43d9e4",
                    "enabled": 1
                }
            },
            "bcfg2": {
                "100": {
                    "checksum": "sha256:a38aef4d9df7de368ce8f26c374fdd7c2d6c7f3a12b07c706265f35b40de7308",
                    "enabled": 1
                }
            },
            "bind": {
                "100": {
                    "checksum": "sha256:03544e87d5e1caca5b6b69fb627b482f44bfa63f49cf5a08245ff08fb9982809",
                    "enabled": 1
                }
            },
            "bitlbee": {
                "100": {
                    "checksum": "sha256:eb7f9de13d74d31a934e7c58cf1a34b428d5695ad1f552d43af73ddfdf15f3e3",
                    "enabled": 1
                }
            },
            "blkmapd": {
                "100": {
                    "checksum": "sha256:b75ceb3e1f47bed84573b3d82d4dbe9023adb0a9a40fc2ddba393addd5f9a1d8",
                    "enabled": 1
                }
            },
            "blueman": {
                "100": {
                    "checksum": "sha256:a2645f50db0b15fac85915686e84d9e1ad347d72d77790f67f9e630d73ecaa13",
                    "enabled": 1
                }
            },
            "bluetooth": {
                "100": {
                    "checksum": "sha256:370a05f9a92e9a3359ed79b853c1719b594592cd5541c56023a92bbc87764270",
                    "enabled": 1
                }
            },
            "boinc": {
                "100": {
                    "checksum": "sha256:354fd26e2ef0e4c35edb3aa0bb068ee269f9d49f8180e0ebca24d06039aa8e0c",
                    "enabled": 1
                }
            },
            "boltd": {
                "100": {
                    "checksum": "sha256:39592577c657fe9b8720c7c8fc0a382b56377cc6c8eff3d3068d276552467e4f",
                    "enabled": 1
                }
            },
            "boothd": {
                "100": {
                    "checksum": "sha256:26c41cbbcfb1fb2a91700612a3e78d7d52300ebf1415cda27364f143f11a382d",
                    "enabled": 1
                }
            },
            "bootloader": {
                "100": {
                    "checksum": "sha256:21a4ec8c7068672a02d49a4ca8721a7d3e661d1c704908e04b7abc9c955e2343",
                    "enabled": 1
                }
            },
            "bootupd": {
                "100": {
                    "checksum": "sha256:c707603b2556dd53524e160e5c66150c18ffc0b4668b2404622282cd2925ddc7",
                    "enabled": 1
                }
            },
            "brctl": {
                "100": {
                    "checksum": "sha256:36617fd6db4ebf3850675aeea3e096fa59a4bfdcb64613f93f8651b6f61db3cb",
                    "enabled": 1
                }
            },
            "brltty": {
                "100": {
                    "checksum": "sha256:5c24db4b697cf2406a8c713240ffb78d953ad090e6287b842f86dffe1290ec26",
                    "enabled": 1
                }
            },
            "bugzilla": {
                "100": {
                    "checksum": "sha256:496bef2bede30915da26b4136fb2f34becdcbc87925e0d44564667901b75d658",
                    "enabled": 1
                }
            },
            "bumblebee": {
                "100": {
                    "checksum": "sha256:0e7317ff234ccbf01bac4d63543ed7babffd63081d4f64100c2bc5f35c3d288e",
                    "enabled": 1
                }
            },
            "cachefilesd": {
                "100": {
                    "checksum": "sha256:3246918462bd57ae9d412e4177a59735624f031c7c5bd79254d0e4ab8c799045",
                    "enabled": 1
                }
            },
            "calamaris": {
                "100": {
                    "checksum": "sha256:22eee16dbfbf19e6c7e3e8a22ecf7794a4667b9c0fe38de1b7892939e5383a9a",
                    "enabled": 1
                }
            },
            "callweaver": {
                "100": {
                    "checksum": "sha256:db38c31ae173d049f813fdcc8019651b1ea662ec270fa27943d851bb4e6fe951",
                    "enabled": 1
                }
            },
            "canna": {
                "100": {
                    "checksum": "sha256:e752c21e4c76ead1a95b3858a36df5c9fe1dcfc2fa72b6e3337db501b255ed14",
                    "enabled": 1
                }
            },
            "ccs": {
                "100": {
                    "checksum": "sha256:2ed6f511d59167652141d928236900e08ac58f0347d5b13e384282f0c9d4bd7c",
                    "enabled": 1
                }
            },
            "cdrecord": {
                "100": {
                    "checksum": "sha256:eb4506711c4aa4449d76ceff06bd4728a67b150fa6daa449329af99d55a7e05f",
                    "enabled": 1
                }
            },
            "certmaster": {
                "100": {
                    "checksum": "sha256:f70165cc07c9c0edbada60c282cfd7b059e1c4dad3fd296c77238bfa7db74519",
                    "enabled": 1
                }
            },
            "certmonger": {
                "100": {
                    "checksum": "sha256:0ad5ac88aac27c3f65958235d1f71ae11ffdbbf0e30f2cd4a45fe0438f8b80c2",
                    "enabled": 1
                }
            },
            "certwatch": {
                "100": {
                    "checksum": "sha256:85e1d3dec4e26d58633e3a66dea23193fee975435de0432a33d9e3f0003148e3",
                    "enabled": 1
                }
            },
            "cfengine": {
                "100": {
                    "checksum": "sha256:0538edf6792efb191ae48f6f7ea825e7a0300b3f5941588a231e2e0ed84c3ef4",
                    "enabled": 1
                }
            },
            "cgroup": {
                "100": {
                    "checksum": "sha256:8a4ca1608501b748500da1cca0611feca68f892b6a0b931eff8391eff2880b83",
                    "enabled": 1
                }
            },
            "chrome": {
                "100": {
                    "checksum": "sha256:d778deea4a45cf38804a67181906680a20b1f94f779096b4b291658a3f4f7797",
                    "enabled": 1
                }
            },
            "chronyd": {
                "100": {
                    "checksum": "sha256:4472d378d0cca0cac8ee119b40beac9a1528f7e8afe3835fc868d9ee50e857ef",
                    "enabled": 1
                }
            },
            "cinder": {
                "100": {
                    "checksum": "sha256:65a156504f35a84298492048f07a967c74cca37467b5519175f0132356a4f3c0",
                    "enabled": 1
                }
            },
            "cipe": {
                "100": {
                    "checksum": "sha256:9aea31e33e0bbb1343acc95c4b96034c0e21cfc8098a6d9239c6fb2ddd964edd",
                    "enabled": 1
                }
            },
            "clock": {
                "100": {
                    "checksum": "sha256:5e2443a8b037a9f47d1082255f61bb3bc94ea76727732bec8ca477ed0f3d1cef",
                    "enabled": 1
                }
            },
            "clogd": {
                "100": {
                    "checksum": "sha256:ffb0c7d931ae1d6bbd575684ae14dbcc542b1a589a5c70235143b6494dbce74e",
                    "enabled": 1
                }
            },
            "cloudform": {
                "100": {
                    "checksum": "sha256:c5a2c5234b842e5d65fe5aa739e2d30ae18b7a4398fd02ec03ffb5db8b0022f5",
                    "enabled": 1
                }
            },
            "cmirrord": {
                "100": {
                    "checksum": "sha256:e55c52b2d9cca85a56a01f9a862ff089a008deb2e23d8dc36bf4db067f9d63d2",
                    "enabled": 1
                }
            },
            "cobbler": {
                "100": {
                    "checksum": "sha256:abdf9b12e79823ecdc818a9aaec4d6e4253f02e8c59b5c320677ae99096b8c8d",
                    "enabled": 1
                }
            },
            "collectd": {
                "100": {
                    "checksum": "sha256:fd6d1f0d8446afdd95c5c88850751a9def7fb10d8efef9113f7808431296374b",
                    "enabled": 1
                }
            },
            "colord": {
                "100": {
                    "checksum": "sha256:087d0f4a10495e4186632d167181b302e35abaaa4aee055628c907e4feb5e60f",
                    "enabled": 1
                }
            },
            "comsat": {
                "100": {
                    "checksum": "sha256:c7d8f1c628aba8babecb74c405389e5508768fce1c19a1a3ffb1f7e2cf21a9bc",
                    "enabled": 1
                }
            },
            "condor": {
                "100": {
                    "checksum": "sha256:a64ce927d71bdb89976bb02ee81c1c0cd362ce79760d529864deb95d78435a81",
                    "enabled": 1
                }
            },
            "conman": {
                "100": {
                    "checksum": "sha256:8610bf4d3c1c1fe1f037c0149f5126ed724947671d3ce2f43ce03318e5bbd6e9",
                    "enabled": 1
                }
            },
            "conntrackd": {
                "100": {
                    "checksum": "sha256:38e74734b46d764bdf548bae5eb0322cb3efab0139b2c57c6293ad4270b2cd72",
                    "enabled": 1
                }
            },
            "consolekit": {
                "100": {
                    "checksum": "sha256:08fd88b15eadd335fb6909b0e19ccfc7fce465a0e1adf66ea23375a528c02a2b",
                    "enabled": 1
                }
            },
            "coreos_installer": {
                "100": {
                    "checksum": "sha256:7aa6f4a2c4b409fb85dbc8e9e1b44a3166437ee81341247655d783b4fc8a3538",
                    "enabled": 1
                }
            },
            "couchdb": {
                "100": {
                    "checksum": "sha256:c2b8357f1cbba4813f68e96b8683bf56117ea18a08f509c47a9007d6dd7e37d0",
                    "enabled": 1
                }
            },
            "courier": {
                "100": {
                    "checksum": "sha256:bee8013d915739bd1e9a8ccb7d6936aa2d05ed68358b0c5624f9b7b20327a489",
                    "enabled": 1
                }
            },
            "cpucontrol": {
                "100": {
                    "checksum": "sha256:c62f78f9a781fdb8c455b4e1c444206466a7a572e499360a40ea752d8eebb332",
                    "enabled": 1
                }
            },
            "cpufreqselector": {
                "100": {
                    "checksum": "sha256:2155538ddced6cc2efbc8c54879c34b385aa1407ea62157644f787ea998de988",
                    "enabled": 1
                }
            },
            "cpuplug": {
                "100": {
                    "checksum": "sha256:6287b50c36817f7293217a030803bf3d62c707699a0e529c7d067989248ddcf8",
                    "enabled": 1
                }
            },
            "cron": {
                "100": {
                    "checksum": "sha256:5b654bad71770454ff600a55533f4da984770414d0cb1541e5d6035b73e90544",
                    "enabled": 1
                }
            },
            "ctdb": {
                "100": {
                    "checksum": "sha256:867a24cfaf2c6935e7c2f0f85e3f9b47de8126ad509db5f7aecdf572492a982c",
                    "enabled": 1
                }
            },
            "cups": {
                "100": {
                    "checksum": "sha256:acfc1b83372629d838cd2e1a291d1e88ad352fb5577eee01c1bcf460d8444883",
                    "enabled": 1
                }
            },
            "cvs": {
                "100": {
                    "checksum": "sha256:b7034b37cb6a943ec0e0b1122fb168440623b525b9d9a871c8f0f80f01427b0e",
                    "enabled": 1
                }
            },
            "cyphesis": {
                "100": {
                    "checksum": "sha256:6e83813cb5e03c15e648a7e798aae3e9c94f9a54ad542e971f176ab9271bc24e",
                    "enabled": 1
                }
            },
            "cyrus": {
                "100": {
                    "checksum": "sha256:d1da2a6beba9e0cd0bc492e8d8cafc7a9d3f1b13342967badb49fba668f6a985",
                    "enabled": 1
                }
            },
            "daemontools": {
                "100": {
                    "checksum": "sha256:1c94c8a99b9623387df88f2693f049682b4d4fc392904afab5bc9ba7a9ccb66a",
                    "enabled": 1
                }
            },
            "dbadm": {
                "100": {
                    "checksum": "sha256:0e9b5b78865e451e40a8f55b406801e55e98d4812c95f37130874438831a9145",
                    "enabled": 1
                }
            },
            "dbskk": {
                "100": {
                    "checksum": "sha256:6d9c565c834b41796ed712b843c7bd664fffafb667765c8d857432498388f9ff",
                    "enabled": 1
                }
            },
            "dbus": {
                "100": {
                    "checksum": "sha256:49b7477e868249bf17c6b5467b705d12e9ddc87bf2060898e7529a646a0f7e34",
                    "enabled": 1
                }
            },
            "dcc": {
                "100": {
                    "checksum": "sha256:e223c213d51685baf4d3be0c5c4805482c6954bd890d803062ddba896654b483",
                    "enabled": 1
                }
            },
            "ddclient": {
                "100": {
                    "checksum": "sha256:51d8f84dc3a8acb26b725e61aed99a1fdf79ea9dbc63fc7200a8d4697ff4c9c1",
                    "enabled": 1
                }
            },
            "denyhosts": {
                "100": {
                    "checksum": "sha256:723f8c7cb84bd0f48a15663d09d67eb67be7f89b275c9ab2966c88ca8d8ac18a",
                    "enabled": 1
                }
            },
            "devicekit": {
                "100": {
                    "checksum": "sha256:9a380e1b546cf2d8eab55a099515bccd1b25d2dc0d0ba28666e7b95f9d665989",
                    "enabled": 1
                }
            },
            "dhcp": {
                "100": {
                    "checksum": "sha256:1735881cc778e961bd742e846686425ec4014676e507460c85158f83ffc131ad",
                    "enabled": 1
                }
            },
            "dictd": {
                "100": {
                    "checksum": "sha256:a4988c85c75b5fd1b4a38b6335bc3fb962c0e78693042fc35785ce68ad5cb371",
                    "enabled": 1
                }
            },
            "dirsrv": {
                "100": {
                    "checksum": "sha256:43becf8b756ebd6e31e51c71f8963de116feaff35ddc0d6f15c4cf9da79b9c73",
                    "enabled": 1
                }
            },
            "dirsrv-admin": {
                "100": {
                    "checksum": "sha256:4c8ca106fff85e11e0a1829ed2ac3f243e16e91d7b782b98ef8999501bf0ba95",
                    "enabled": 1
                }
            },
            "dmesg": {
                "100": {
                    "checksum": "sha256:0dc55ec34569ba57e832be6e33834acf3055e707f8a04e644a517fe6c25fbe0d",
                    "enabled": 1
                }
            },
            "dmidecode": {
                "100": {
                    "checksum": "sha256:9a4b3d93e6fe7a710d739a682991df8e4fb3bfa5c4408f80b7bcf8bded0b3f91",
                    "enabled": 1
                }
            },
            "dnsmasq": {
                "100": {
                    "checksum": "sha256:c835e6157080af15bad60e3175ec73411ecc09c79b1f24488d38dbb43e49c524",
                    "enabled": 1
                }
            },
            "dnssec": {
                "100": {
                    "checksum": "sha256:77d3836aae67432fe5aaad6cf960c57c4c540253b884d0da7ce24f527f480bbb",
                    "enabled": 1
                }
            },
            "dovecot": {
                "100": {
                    "checksum": "sha256:dba3ca8b279efbe4c9d41b20c94f4eaaf69b63234781b6bffc7a0186e761d972",
                    "enabled": 1
                }
            },
            "drbd": {
                "100": {
                    "checksum": "sha256:b2a6e422462c98deab9be1ebbd7b4c0e541652cef6544eb817890b59691f6f63",
                    "enabled": 1
                }
            },
            "dspam": {
                "100": {
                    "checksum": "sha256:a0e4a3f431978461ff0c6606dece39299123c11c0c50dd07dec0523b087e6d69",
                    "enabled": 1
                }
            },
            "entropyd": {
                "100": {
                    "checksum": "sha256:ae9d99bc78f2f39ed963de93eacb8d15e06f9695542787fd88dd34ae5c417030",
                    "enabled": 1
                }
            },
            "exim": {
                "100": {
                    "checksum": "sha256:df1cfbb1ab78dbc0de7189c60173c3e0fa87640761050a6ee1915ad7b268f937",
                    "enabled": 1
                }
            },
            "fail2ban": {
                "100": {
                    "checksum": "sha256:9050e1e27a67f06989fd7b1ae77b9c0086f4017aa6f4b3123e4d40b4c4be24ef",
                    "enabled": 1
                }
            },
            "fcoe": {
                "100": {
                    "checksum": "sha256:de1005aba353d2cd82e7d2542b9f0f22a537c0836598dc0d7b3fc739b0a1512d",
                    "enabled": 1
                }
            },
            "fdo": {
                "100": {
                    "checksum": "sha256:0040a417e0edd66d8c3ebbc52a7b9a61a114724ef1b47f41e7d0e0dd9f496667",
                    "enabled": 1
                }
            },
            "fetchmail": {
                "100": {
                    "checksum": "sha256:052196a9650b28088637ad2c1bc2e3e18c9ebb26b9d81fc22d06f9383448e082",
                    "enabled": 1
                }
            },
            "finger": {
                "100": {
                    "checksum": "sha256:7e077f2f9cbb7a67e901983f068e542f2ea7bf8fbd32398624006f2ec3721469",
                    "enabled": 1
                }
            },
            "firewalld": {
                "100": {
                    "checksum": "sha256:074ef28f63316a886aa80247be41c3f4eb0c4ab24be2538204a8473c206377dd",
                    "enabled": 1
                }
            },
            "firewallgui": {
                "100": {
                    "checksum": "sha256:9123c9d8a6246155165e536233f76373dce5ed442b693d3adfad2cfb0d045d14",
                    "enabled": 1
                }
            },
            "firstboot": {
                "100": {
                    "checksum": "sha256:bd1c559e70c0fa8a6dd0e9e541410e8def49a3f8769e609a7371bcae87cbc7a1",
                    "enabled": 1
                }
            },
            "fprintd": {
                "100": {
                    "checksum": "sha256:651a0158d860694e2f7e3c2ff5842cb1167edd212f207d25fbd048cc0bca8b1e",
                    "enabled": 1
                }
            },
            "freeipmi": {
                "100": {
                    "checksum": "sha256:b0055e597efbe80253d626f80a865cb814f393fa91e66afd4458d436fa896318",
                    "enabled": 1
                }
            },
            "freqset": {
                "100": {
                    "checksum": "sha256:e8a220c3eef785816671acd81eb445e324d475a084fbd06263b797d578d02f3c",
                    "enabled": 1
                }
            },
            "fstools": {
                "100": {
                    "checksum": "sha256:127cb7df805e3a46359a5207b063f90c1d19e6d3198182767ed70779b4b28221",
                    "enabled": 1
                }
            },
            "ftp": {
                "100": {
                    "checksum": "sha256:9902176e4edcbecebd0f6ac300b28794668de3d4540a9ae5be717f396b0292be",
                    "enabled": 1
                }
            },
            "fwupd": {
                "100": {
                    "checksum": "sha256:c052a6835e70d9c0e051e979e81764ebb89f6f133a440db25d1dde163ffa48c4",
                    "enabled": 1
                }
            },
            "games": {
                "100": {
                    "checksum": "sha256:45772c12ebf653bb9a623771101b99ab2ffe914d03370ebfbe3b0912ca0d6b8a",
                    "enabled": 1
                }
            },
            "gdomap": {
                "100": {
                    "checksum": "sha256:1e46eff6833500ac418eda8676b641bb33eeeaec5ade25dc6d3ab091f736cad1",
                    "enabled": 1
                }
            },
            "geoclue": {
                "100": {
                    "checksum": "sha256:708228486a23ad80ffd7d6a05a65a7b655f1dbc738ef8479e473d60e4cc6d3f7",
                    "enabled": 1
                }
            },
            "getty": {
                "100": {
                    "checksum": "sha256:32e35dc7eeec4247db73e826bcd13b91f8370e8544b223547f4354f42b8937ab",
                    "enabled": 1
                }
            },
            "git": {
                "100": {
                    "checksum": "sha256:d7d5bebe879118666f1417ffae9f0772e0889406dcc768c4c3b394ec7cc2a231",
                    "enabled": 1
                }
            },
            "gitosis": {
                "100": {
                    "checksum": "sha256:b3e2375f3c95924b4daa45ecd4951af233195f655f5cab28298b21782e7df2f0",
                    "enabled": 1
                }
            },
            "glance": {
                "100": {
                    "checksum": "sha256:9414bf98dd0ad365e8c880b881286f51845c6a14bb7fc1022770cbf78c33152c",
                    "enabled": 1
                }
            },
            "gnome": {
                "100": {
                    "checksum": "sha256:86efbd64d547d08ff38c0ef19d92899032b5c7dd0972e1831dc8b58211e46d91",
                    "enabled": 1
                }
            },
            "gpg": {
                "100": {
                    "checksum": "sha256:b547a37d1a7e474dd3e085d36e4ca7276ccd68bf3ecf2e09fe7a846030a80020",
                    "enabled": 1
                }
            },
            "gpm": {
                "100": {
                    "checksum": "sha256:fc6c94c251896fa97e9298a902669eba5bb2179ad7ace408d5e6bc05973174e0",
                    "enabled": 1
                }
            },
            "gpsd": {
                "100": {
                    "checksum": "sha256:19c083472a464ada6846b9173292a9d72a06a4e5e778d69783bd51ecc1553eb0",
                    "enabled": 1
                }
            },
            "gssproxy": {
                "100": {
                    "checksum": "sha256:3c8410c8c59ae99a170ad00ee7ad66d186d3e83d66ad3c2300333abdaa7fb21c",
                    "enabled": 1
                }
            },
            "guest": {
                "100": {
                    "checksum": "sha256:eea7130d539ac61485efb94b90bbb6af7bf5dee0957c37e3ebee3a8da4797deb",
                    "enabled": 1
                }
            },
            "hddtemp": {
                "100": {
                    "checksum": "sha256:9722c02beeb44ba92062700b4c043eec7a1d39a7eb91a1289edea7a928129e0f",
                    "enabled": 1
                }
            },
            "hostapd": {
                "100": {
                    "checksum": "sha256:1d57e77e17f124ee365365513274cf402353add167245a839f89126f071bfbfb",
                    "enabled": 1
                }
            },
            "hostname": {
                "100": {
                    "checksum": "sha256:9b8c53c2622515f672ffc22c9f18a6db6fc2566e91dbdfea83faf54a6cd7c8ed",
                    "enabled": 1
                }
            },
            "hsqldb": {
                "100": {
                    "checksum": "sha256:2ab3e4455cd6580a63f4026c6b1b133e658bbca0de0d8d6c74a67ab0c3c866ad",
                    "enabled": 1
                }
            },
            "hwloc": {
                "100": {
                    "checksum": "sha256:cf362b5c9bd397a8d04ff430a173c5132b52a5fa0ca5c7ac1dd6d41920259ead",
                    "enabled": 1
                }
            },
            "hypervkvp": {
                "100": {
                    "checksum": "sha256:896fcf1247e335cbd9b09d4e0e15363dda32d2d0cede4c444416198af2330362",
                    "enabled": 1
                }
            },
            "ibacm": {
                "100": {
                    "checksum": "sha256:401eb3be55a99ce20f4514c94004d559a5dbb956979dc810c7288ed52ce20bef",
                    "enabled": 1
                }
            },
            "ica": {
                "100": {
                    "checksum": "sha256:40af9f330d0e9ae06e87e9501e571ad8dec8827a42fd207b91483ad64f73dab2",
                    "enabled": 1
                }
            },
            "icecast": {
                "100": {
                    "checksum": "sha256:c475483816608ce48e61163a5d661b65c9a4ddaf119607c204d94ec220c74f92",
                    "enabled": 1
                }
            },
            "iiosensorproxy": {
                "100": {
                    "checksum": "sha256:3b5ca951811250c2f5b8a0e9665ceed70a4f920db3160221838a59e6664627d9",
                    "enabled": 1
                }
            },
            "inetd": {
                "100": {
                    "checksum": "sha256:79c55156850062f9d931ff310184927ffb8fad23533e0a81e8603c0eeb39473d",
                    "enabled": 1
                }
            },
            "init": {
                "100": {
                    "checksum": "sha256:9d8a5e4eedfeb46631070df5c3aeb0abcbd44523ec5bad52dc3942254081c02a",
                    "enabled": 1
                }
            },
            "inn": {
                "100": {
                    "checksum": "sha256:7d215013470faf3ba6da02e9b0eadd84100e27d3811d5239652e29a834dee4c9",
                    "enabled": 1
                }
            },
            "insights_client": {
                "100": {
                    "checksum": "sha256:9a25ddb951183ffbc71b83ab24c259f1572d9d47278862371f7d2c2b67eff05d",
                    "enabled": 1
                }
            },
            "iodine": {
                "100": {
                    "checksum": "sha256:f7f9445c1945584a868329ec91cdf3812e2f1a19cf4ae8145a97ab5f60e9b728",
                    "enabled": 1
                }
            },
            "iotop": {
                "100": {
                    "checksum": "sha256:4db5d6a9195336b52e11d78acc306a36d4d9b93a4bf9931f7ce42197a55f9619",
                    "enabled": 1
                }
            },
            "ipmievd": {
                "100": {
                    "checksum": "sha256:ae94ecc6603e112053d82c4b034e371fa0ae3ea6227d22730a79c2d88f7f518c",
                    "enabled": 1
                }
            },
            "ipsec": {
                "100": {
                    "checksum": "sha256:dea36801533eb8484f81e791e8e9bafbe2ee01a0a60cfabd45fcc99d768c958a",
                    "enabled": 1
                }
            },
            "iptables": {
                "100": {
                    "checksum": "sha256:158f46e1903a9e4b59492cd3b6d002226ba99215a92f8ab7254cc201adfc6d41",
                    "enabled": 1
                }
            },
            "irc": {
                "100": {
                    "checksum": "sha256:8137b7bf2df668299a1f07a7357891c9b532623b8a4a647938502f0115a5719d",
                    "enabled": 1
                }
            },
            "irqbalance": {
                "100": {
                    "checksum": "sha256:edd68cf00488897a118c97d42b68b5ebc42eade076d435668de403386055768a",
                    "enabled": 1
                }
            },
            "iscsi": {
                "100": {
                    "checksum": "sha256:c6a126a43e805c50b75ce428c6d06f2098aa3832c4c2b776c27de47db763a973",
                    "enabled": 1
                }
            },
            "isns": {
                "100": {
                    "checksum": "sha256:730425a2f8fcf7def5a5d3cd7e2fe86c4798f48ed990f01b6c4f61c2c1af4729",
                    "enabled": 1
                }
            },
            "jabber": {
                "100": {
                    "checksum": "sha256:954964e3390965fb3bd16d9e0f04e5c1733b1d52d0f9aeb86c15097128847e98",
                    "enabled": 1
                }
            },
            "jetty": {
                "100": {
                    "checksum": "sha256:cd35fa8929bafd81093cfc39e523c8fe55b1f3ebfe105630920d9aa1f50d27a0",
                    "enabled": 1
                }
            },
            "jockey": {
                "100": {
                    "checksum": "sha256:60808a39b8af95362a9e430e000fe157e610f06845766c1bf84567986773c3a7",
                    "enabled": 1
                }
            },
            "journalctl": {
                "100": {
                    "checksum": "sha256:37b991b37d592bae92deb5719d208e9272492cc81358a603aeac66012da73303",
                    "enabled": 1
                }
            },
            "kdump": {
                "100": {
                    "checksum": "sha256:8df5cf83cd544674505896c1aa2d5bbc3a63bfec5bd23082efb6d3e87fb1373f",
                    "enabled": 1
                }
            },
            "kdumpgui": {
                "100": {
                    "checksum": "sha256:1037dc7bcf3027e597f682ebaed125ffe524999e5ed9e5e59ba4d2d96dd56928",
                    "enabled": 1
                }
            },
            "keepalived": {
                "100": {
                    "checksum": "sha256:8ea474a204f637775dfaf134e51c27da197f647c4c01121c398c7135d17ae93a",
                    "enabled": 1
                }
            },
            "kerberos": {
                "100": {
                    "checksum": "sha256:7191052f585d5fafbac635931a6731644f0bd083abc2af3de0f9cf8a09dfa012",
                    "enabled": 1
                }
            },
            "keyboardd": {
                "100": {
                    "checksum": "sha256:f0d2c1e478cf050cc9a4975c3e477c7ace50c8ec4f24e6378c3bf9f5375ac47c",
                    "enabled": 1
                }
            },
            "keystone": {
                "100": {
                    "checksum": "sha256:fdb4d581281615682a3d84cb0d172eb400d4e421e05fa6eb53e935c998eb66c1",
                    "enabled": 1
                }
            },
            "kismet": {
                "100": {
                    "checksum": "sha256:3ba626d0e10d52e23eb25ed2bcfb3333d10724cc37b811d191e2377b0a50a32c",
                    "enabled": 1
                }
            },
            "kmscon": {
                "100": {
                    "checksum": "sha256:c362a617fac2d877d61251310ac60e2dd1f914746224fb481fc5877ac4c9e615",
                    "enabled": 1
                }
            },
            "kpatch": {
                "100": {
                    "checksum": "sha256:ea52717eb9f8414bf6a91da0e0dcdf8911d0dbdc6ef24636e3d55364f9d74a48",
                    "enabled": 1
                }
            },
            "ksmtuned": {
                "100": {
                    "checksum": "sha256:3792d937dae3c0c5020fcd3d231635e0e3bce9855f5182f4a78596b402b1e01e",
                    "enabled": 1
                }
            },
            "ktalk": {
                "100": {
                    "checksum": "sha256:c341246894ef6ac35ff57578dad797e3cab4576289ed54fe79a8f520d5f97586",
                    "enabled": 1
                }
            },
            "l2tp": {
                "100": {
                    "checksum": "sha256:0e48d9b7b7fa1119f136c8069d0dc8b1411c4fab98855647ca97a58e20f49771",
                    "enabled": 1
                }
            },
            "ldap": {
                "100": {
                    "checksum": "sha256:bfe184a21cf22e874bf9c4adf17d92ab37f78b212bac0a1e4205605666a72c5e",
                    "enabled": 1
                }
            },
            "libraries": {
                "100": {
                    "checksum": "sha256:2e54d7f7a9bfb8313eb16163e91dbc59ebe37e99c5d1185a1e94301b026ce971",
                    "enabled": 1
                }
            },
            "likewise": {
                "100": {
                    "checksum": "sha256:076102b64c364619c722ec50ff1bc6711583a48a3e4d628b3d5b702664ded6db",
                    "enabled": 1
                }
            },
            "linuxptp": {
                "100": {
                    "checksum": "sha256:92aa4605402b250c0535d730f0e42463c3d03ac198c39426d44a9318a193bc8c",
                    "enabled": 1
                }
            },
            "lircd": {
                "100": {
                    "checksum": "sha256:be1fcc3f6423021d5dfff876c22329b76e2a8a3408277643cf19b387d3af18df",
                    "enabled": 1
                }
            },
            "livecd": {
                "100": {
                    "checksum": "sha256:ebc240faa5377ca5d45a084da15424d873958197df22f16e7781f67da72c02da",
                    "enabled": 1
                }
            },
            "lldpad": {
                "100": {
                    "checksum": "sha256:2b7cf634dca82f4249678877fd64f778d09e1fae552c7747e783d9122509e197",
                    "enabled": 1
                }
            },
            "loadkeys": {
                "100": {
                    "checksum": "sha256:de8b3dab704fe78e803c012052bf2890d7e87b8b76d8fdfbf613d6d697f01c9d",
                    "enabled": 1
                }
            },
            "locallogin": {
                "100": {
                    "checksum": "sha256:952fe72cafbed51e96e7f051d9523c1ca3ef665b28c5b0f3c0d11d521258daac",
                    "enabled": 1
                }
            },
            "lockdev": {
                "100": {
                    "checksum": "sha256:31cb1e12fe7d8fbd64fe9e9913a00ac3aaebba1aa074abf1ab9bf76e101f7d87",
                    "enabled": 1
                }
            },
            "logadm": {
                "100": {
                    "checksum": "sha256:bc3d6d6cdcb3b2dac1131f16f15bed74c8b1fa37a353da2793cde2061ffdc6b4",
                    "enabled": 1
                }
            },
            "logging": {
                "100": {
                    "checksum": "sha256:78e8c00d69c84ea399c88137b1c5276084c98a468eb4df58c13894c8c569cd18",
                    "enabled": 1
                }
            },
            "logrotate": {
                "100": {
                    "checksum": "sha256:d9cbeec25733f9393c0967a8f9726bd7cd1d070c3b86c0c0d82379601b99b3dd",
                    "enabled": 1
                }
            },
            "logwatch": {
                "100": {
                    "checksum": "sha256:4195de7172d5d5d1bde97be084e23e80032e07b4f2330ac5620759d4910a4da5",
                    "enabled": 1
                }
            },
            "lpd": {
                "100": {
                    "checksum": "sha256:cf8fb1e0de66b91a3d35dd0b5a5f93a69937c1be4a8103d10e6edb70d17a4830",
                    "enabled": 1
                }
            },
            "lsm": {
                "100": {
                    "checksum": "sha256:e7a424ee7f32c812faea57710e6766f23963ec2e5b2a38486a6c024d160f9c23",
                    "enabled": 1
                }
            },
            "lttng-tools": {
                "100": {
                    "checksum": "sha256:ccb6b5c8378542594d25426623373c4de49f01e37b8fd0f2bed8d7c4f83216b9",
                    "enabled": 1
                }
            },
            "lvm": {
                "100": {
                    "checksum": "sha256:19d491afd9f4b258e4ec73ddbb3243feafc28db5f0c836784f8fa29b2146d215",
                    "enabled": 1
                }
            },
            "mailman": {
                "100": {
                    "checksum": "sha256:e121209046f2487ef64048e7ae1408da84d9c6ee6a88d6639cef3b6c4b2be19a",
                    "enabled": 1
                }
            },
            "mailscanner": {
                "100": {
                    "checksum": "sha256:528e84d30728ad73783b1cf6992be9cc1bc14f77f4d5e0ce6ca6ea2d759f2061",
                    "enabled": 1
                }
            },
            "man2html": {
                "100": {
                    "checksum": "sha256:e3a13e4c9a9f651ba32221ebaa37b8a2f7d15f7480622a755f8301022dcc8f15",
                    "enabled": 1
                }
            },
            "mandb": {
                "100": {
                    "checksum": "sha256:2c098f0612bbd9d9ee9db00a817c51726da69e7536d687adf74be7d4df7911f8",
                    "enabled": 1
                }
            },
            "mcelog": {
                "100": {
                    "checksum": "sha256:48343f6df53f591eff2c6a76bfbf12f351daa9e382785fb47e1017cd7badab91",
                    "enabled": 1
                }
            },
            "mediawiki": {
                "100": {
                    "checksum": "sha256:a901c614ce730a6943df15300120f9c0dab9fa89f234c0301f4a995f5a1a60cb",
                    "enabled": 1
                }
            },
            "memcached": {
                "100": {
                    "checksum": "sha256:03335203d0a113eead2d95a159df467fc164d12cc1c9ce4b58149da7b80d7943",
                    "enabled": 1
                }
            },
            "milter": {
                "100": {
                    "checksum": "sha256:bfbe9652c48ed1b2abdba90720f2abff11a3c31a72f5b3c56e8eac168542072f",
                    "enabled": 1
                }
            },
            "minidlna": {
                "100": {
                    "checksum": "sha256:d61743f3489bbc08417d6dbc894be0f19b50bb6e76bdb0b9a344a5b29a565a91",
                    "enabled": 1
                }
            },
            "minissdpd": {
                "100": {
                    "checksum": "sha256:ab3a5b4c6d53cd2d6d1bb1e32d587bd65219f22d8f94b58d2f9948fcc6d4bfa5",
                    "enabled": 1
                }
            },
            "mip6d": {
                "100": {
                    "checksum": "sha256:56a370111ea7709b149e4a0fbdb1ac1b123427831161d3f6170efa64a18aeb5e",
                    "enabled": 1
                }
            },
            "mirrormanager": {
                "100": {
                    "checksum": "sha256:37cda21ca34e7585b6fb861d91cafeb146ca75d0a7878bbc06ab24eabe6706c3",
                    "enabled": 1
                }
            },
            "miscfiles": {
                "100": {
                    "checksum": "sha256:f04df10eaf97dd9eb0520f7c208e6002d4c695acfb2ce58e52fd8b689c587226",
                    "enabled": 1
                }
            },
            "mock": {
                "100": {
                    "checksum": "sha256:6bf94b1043da99327b1f68a10215d963bdd7b0a7f7c3f74c801d1a8db22542d7",
                    "enabled": 1
                }
            },
            "modemmanager": {
                "100": {
                    "checksum": "sha256:412ba79b5e6a4132630f2b8da80b9e66ff5992e81ebcc206ec2f90c67ccf4ee5",
                    "enabled": 1
                }
            },
            "modutils": {
                "100": {
                    "checksum": "sha256:9fd39aa8cd5ccc6d38817a0dc4915dedce287c3e5ccd5063d5e890196686d253",
                    "enabled": 1
                }
            },
            "mojomojo": {
                "100": {
                    "checksum": "sha256:665b33a4d2e32a1a3b08ebaca792c7d1093782e3f885d048c5c2be57cea07d1e",
                    "enabled": 1
                }
            },
            "mon_statd": {
                "100": {
                    "checksum": "sha256:17b96152a9ff5a04a7cd3514903bca98e78369bc5791e7bb88aab6dcc47e3b7d",
                    "enabled": 1
                }
            },
            "mongodb": {
                "100": {
                    "checksum": "sha256:acd421e39f4c53a011a47ef7a271efc7d4f85a97575d03f69e30dedfaa1b14c2",
                    "enabled": 1
                }
            },
            "motion": {
                "100": {
                    "checksum": "sha256:9c2059177a49f2cfddca3629a29929594aec4b9dcd1fa06a80c1119fa687ac1f",
                    "enabled": 1
                }
            },
            "mount": {
                "100": {
                    "checksum": "sha256:0469642c05b99ec3b9f0472e91d161feead7bf4c4a4190cfd54b856ea9b93ea4",
                    "enabled": 1
                }
            },
            "mozilla": {
                "100": {
                    "checksum": "sha256:109d333319ff37383f2e3f6bfa356fb24b7adf3702c51f8badb8a4714c99a430",
                    "enabled": 1
                }
            },
            "mpd": {
                "100": {
                    "checksum": "sha256:4ba142d40036af5be213284b79dd953533bcb4d9846c3b697813002b98107b7a",
                    "enabled": 1
                }
            },
            "mplayer": {
                "100": {
                    "checksum": "sha256:e778408f9ad76e2da9c32482ac1f0c0495f6f552ee39fea95dccc818c70a7798",
                    "enabled": 1
                }
            },
            "mptcpd": {
                "100": {
                    "checksum": "sha256:bf848203e9b4e05ee5da14c2ced4592f7147f2674b296dd0ff76049364cb5d6d",
                    "enabled": 1
                }
            },
            "mrtg": {
                "100": {
                    "checksum": "sha256:c14a0cfca79de2171c617ec3aa77ab2a0358a78678c6711d570fe829e993a1dd",
                    "enabled": 1
                }
            },
            "mta": {
                "100": {
                    "checksum": "sha256:b8a45a6236afbcd2102f71330ffd2598a99531ec55b84be04b210c3cdea0d6dd",
                    "enabled": 1
                }
            },
            "munin": {
                "100": {
                    "checksum": "sha256:c0e62e19e20f833e62ad6a5fba025b3fc5d5ada2ea29db094f648dfa72cf713c",
                    "enabled": 1
                }
            },
            "mysql": {
                "100": {
                    "checksum": "sha256:5513598214e4ac4737a0f73a4349d8f786334d62ca92ea0099a91d89f5717103",
                    "enabled": 1
                }
            },
            "mythtv": {
                "100": {
                    "checksum": "sha256:971a0c0ef295e7fa2ec443ae9e0d752bb1acab9928fa0c233995b7e7f3f1aad7",
                    "enabled": 1
                }
            },
            "naemon": {
                "100": {
                    "checksum": "sha256:75db52cc67150da8946fb064fa2508885272c63af0628d48c4a35655eb912b79",
                    "enabled": 1
                }
            },
            "nagios": {
                "100": {
                    "checksum": "sha256:68a8d2f4d8b1ebda8d47cb325bed05299f768c756932cf3bc9c027b32142f234",
                    "enabled": 1
                }
            },
            "namespace": {
                "100": {
                    "checksum": "sha256:11505cafa9be1281e93b45a77229c321ac6bafb99673bc4c22e5326a42efca0c",
                    "enabled": 1
                }
            },
            "ncftool": {
                "100": {
                    "checksum": "sha256:5fd7eb85c1fb665c271665cf5c419d3dbb6305dfa40bfa34e8246cdb1232fce2",
                    "enabled": 1
                }
            },
            "netlabel": {
                "100": {
                    "checksum": "sha256:dec9414d3310d4f06ae940978da1b81fea6cbbd52eade15a5c7277558df3cc7b",
                    "enabled": 1
                }
            },
            "netutils": {
                "100": {
                    "checksum": "sha256:31e40dfd1f5a028f5bc20da7b21ebb5103787122703feaeec8555eb067ce41be",
                    "enabled": 1
                }
            },
            "networkmanager": {
                "100": {
                    "checksum": "sha256:26ed3cfe7224044d84743054fa4c4a4fe11b0dadbae54648d2e3c47b9f5e1b5d",
                    "enabled": 1
                }
            },
            "ninfod": {
                "100": {
                    "checksum": "sha256:9b4707184af17bb045236a2b198dc769a6c37716cb03b1c7b49698620ac0d00b",
                    "enabled": 1
                }
            },
            "nis": {
                "100": {
                    "checksum": "sha256:529d649b899b2609c0555f37e1bffd5d764943134a1a36a44bd3c0e58c42ac9b",
                    "enabled": 1
                }
            },
            "nova": {
                "100": {
                    "checksum": "sha256:8072b8372f9a40e1252ec63a0cec6687eef0f7fdec796831fe7359258fae71d7",
                    "enabled": 1
                }
            },
            "nscd": {
                "100": {
                    "checksum": "sha256:21e4816c7552451bf7003ff77e760c89894101990008582618e0e1d183c8bf4c",
                    "enabled": 1
                }
            },
            "nsd": {
                "100": {
                    "checksum": "sha256:811d6c99554491f38f1f09d4d6ec47a7bedbd438ff4aa0c0a9cf5bcbd635b58b",
                    "enabled": 1
                }
            },
            "nslcd": {
                "100": {
                    "checksum": "sha256:1f31f04eb1d7670d7b20305cc9630bd997a7422e591c90fc43ff11e86ce3033f",
                    "enabled": 1
                }
            },
            "ntop": {
                "100": {
                    "checksum": "sha256:35e90cfdcf607f9adedf10cf3e6230e04d4d9186012285a83d2a0af49babd413",
                    "enabled": 1
                }
            },
            "ntp": {
                "100": {
                    "checksum": "sha256:00a08503da498b8a8e909870a25c9e96095d58532cac58be44050af75b2565fb",
                    "enabled": 1
                }
            },
            "numad": {
                "100": {
                    "checksum": "sha256:a067fc44175cf9c5a7aa2763203f773cfe826dd0426c252d4ab6b2aae38c5875",
                    "enabled": 1
                }
            },
            "nut": {
                "100": {
                    "checksum": "sha256:837a6aa61c338fd1711d508ec7ec1430704e05d3e1447c075ac5790c25cb625d",
                    "enabled": 1
                }
            },
            "nvme_stas": {
                "100": {
                    "checksum": "sha256:6da583b7229d5e0e9044bdb93e0b2c24683b50d7b98ac4b7030f2badfb4a3977",
                    "enabled": 1
                }
            },
            "nx": {
                "100": {
                    "checksum": "sha256:99e37fc91859f012471c0382fb758ebb6276680c1aaa487fbfd5a0bb0fcbd32c",
                    "enabled": 1
                }
            },
            "obex": {
                "100": {
                    "checksum": "sha256:eae97e1b0d9f65da798618786f7a44fa088ba644fe43bd46cd518c0838d3317d",
                    "enabled": 1
                }
            },
            "oddjob": {
                "100": {
                    "checksum": "sha256:bd4443d1334e92e171729074cce48baecb8e4707aad0eb6f25d106886866d325",
                    "enabled": 1
                }
            },
            "opafm": {
                "100": {
                    "checksum": "sha256:9e0ccc324238937c2fb3cc36ecb8210c7691b805f3739b23e1cef95be82bff17",
                    "enabled": 1
                }
            },
            "openct": {
                "100": {
                    "checksum": "sha256:66b940104f2ee7b701d17b5f2b7c5787c4d0d27c8434753cd5ffdc34ad662a3e",
                    "enabled": 1
                }
            },
            "opendnssec": {
                "100": {
                    "checksum": "sha256:f0ac631bf1cab954ad343673dbcf311ce2686f1a90858ea31ef2b06260e2e142",
                    "enabled": 1
                }
            },
            "openfortivpn": {
                "100": {
                    "checksum": "sha256:0f8abc6a81b955b9888753f1b26342f1a4c943bdc0ced8cdcfde51c2cd12e0c6",
                    "enabled": 1
                }
            },
            "openhpid": {
                "100": {
                    "checksum": "sha256:b0b40f9da8cbf6f96048d61d33cdedd8c818a8ed3177de37291685089ade8483",
                    "enabled": 1
                }
            },
            "openshift": {
                "100": {
                    "checksum": "sha256:890bfacbe2ba8de8ee35c0d7bb5a8191fdb49819d0d64441bd1d4f442d34adbf",
                    "enabled": 1
                }
            },
            "openshift-origin": {
                "100": {
                    "checksum": "sha256:87d03b717c75c41a100d297e542c47787922e5dd2f01d7b90071263d48687975",
                    "enabled": 1
                }
            },
            "opensm": {
                "100": {
                    "checksum": "sha256:b017922f020abdd60b85a2b5d4743f982e85fca7f783dd32be78311fd5197ba7",
                    "enabled": 1
                }
            },
            "openvpn": {
                "100": {
                    "checksum": "sha256:e165f80516476ffe1b93bdd74ad3a6d69720e0136fc3620f6ec7710dc9765007",
                    "enabled": 1
                }
            },
            "openvswitch": {
                "100": {
                    "checksum": "sha256:e9581e7e22fd035c8e7312f22d04171ffb807e16eb57205413fcca8deac68fc7",
                    "enabled": 1
                }
            },
            "openwsman": {
                "100": {
                    "checksum": "sha256:42155472938e6b25076cda306a0c176db03ae2722597fd4004380b5222589b67",
                    "enabled": 1
                }
            },
            "oracleasm": {
                "100": {
                    "checksum": "sha256:f47fdeba48ebedde1b490b598cb46fd8b30d4e86264f7f3ce68bd2af91409792",
                    "enabled": 1
                }
            },
            "osad": {
                "100": {
                    "checksum": "sha256:ba8c88da0b90ee0eec84f709a7a89bb3b3e458db643317646e8379cb9d403255",
                    "enabled": 1
                }
            },
            "pads": {
                "100": {
                    "checksum": "sha256:6279e391de4f2978d93dd1a88c23aeffb8028bc50d81a0776a7247a011b3898f",
                    "enabled": 1
                }
            },
            "passenger": {
                "100": {
                    "checksum": "sha256:09ef31338f328d05054169704c4cdcb78f291a93fd0d5165fdb33409d1c46018",
                    "enabled": 1
                }
            },
            "pcmcia": {
                "100": {
                    "checksum": "sha256:94fb82e5d82810193cc60f465995348d0fd733501f2691d9cf8058b4bc611078",
                    "enabled": 1
                }
            },
            "pcp": {
                "100": {
                    "checksum": "sha256:c8224a15f7049ea64edc179a5f7b940ffe72c46266cf3bccdf125b1b929e975b",
                    "enabled": 1
                }
            },
            "pcscd": {
                "100": {
                    "checksum": "sha256:b33562b9e8be469abed92ac9cb29e55e58e5d28caf5c5a295486fa1da4035d6b",
                    "enabled": 1
                }
            },
            "pdns": {
                "100": {
                    "checksum": "sha256:e41889c43b795845eb734032b62894802290e804baecf62685e53211ee3997fc",
                    "enabled": 1
                }
            },
            "pegasus": {
                "100": {
                    "checksum": "sha256:e9b396ef7a02cba4482e9f56fde9f1fbfa7e04de4dfd3d80b3523ddb332ffdab",
                    "enabled": 1
                }
            },
            "permissivedomains": {
                "100": {
                    "checksum": "sha256:2453bad4ace526f3cf2c60b358e95a5476692ef25da107b10f52f3af27c056d2",
                    "enabled": 1
                }
            },
            "pesign": {
                "100": {
                    "checksum": "sha256:8b5834f435b3bd76aba49516a21dcc5f45c867c4c1e748543e4c573085c7a15b",
                    "enabled": 1
                }
            },
            "pingd": {
                "100": {
                    "checksum": "sha256:ac0c04cef30f7c01619c07f9e4c2028a7d647cafd46e818e163222bb9f6a98ba",
                    "enabled": 1
                }
            },
            "piranha": {
                "100": {
                    "checksum": "sha256:7518a890684f833f06a9e0db0bc13bc187c3462f83aa0c07848d0fdf8f9d5461",
                    "enabled": 1
                }
            },
            "pkcs": {
                "100": {
                    "checksum": "sha256:2daf9e32ec14aa1b96f49dbc4cdd4afd7d666a87e2ce3acf5c35b32a681fa3e4",
                    "enabled": 1
                }
            },
            "pkcs11proxyd": {
                "100": {
                    "checksum": "sha256:7ab6b9b9691f9a43bb258c657cb2748c10b811530461739b2449a7dcbedc6d5d",
                    "enabled": 1
                }
            },
            "pki": {
                "100": {
                    "checksum": "sha256:949a05604dd067f4bfbe8aefc95565ac5f1b14598713063d245e8f38fbf01a9a",
                    "enabled": 1
                }
            },
            "plymouthd": {
                "100": {
                    "checksum": "sha256:873b2ae3732ee828b2fe956739072318924e333974d09be23d8af18d55150de5",
                    "enabled": 1
                }
            },
            "podsleuth": {
                "100": {
                    "checksum": "sha256:fe135f8a642cd53b19fcbeca60b9eb5e0d2c5cc84f89167e686ae5f9fa42e6ed",
                    "enabled": 1
                }
            },
            "policykit": {
                "100": {
                    "checksum": "sha256:c6dc9c24a34be05b9fecb9dc2566e35a47d7b5d0a70ce3249dda642258374f5f",
                    "enabled": 1
                }
            },
            "polipo": {
                "100": {
                    "checksum": "sha256:0a89a59bbe58e1a5a0d8bb9dab70b6967cda66ce3a110993446d1213a488b631",
                    "enabled": 1
                }
            },
            "portmap": {
                "100": {
                    "checksum": "sha256:15d9f332240b57891a19bd34578401f532242fa4fdae003d872eb1ddb009cf86",
                    "enabled": 1
                }
            },
            "portreserve": {
                "100": {
                    "checksum": "sha256:69fec82f8d2a804a8641167815d32835237f878fe7d9d52297f7d4f4e732f3a8",
                    "enabled": 1
                }
            },
            "postfix": {
                "100": {
                    "checksum": "sha256:c4c885b4103c94428b70933dadb8809fa695b3296d474948aac039bd6f019c87",
                    "enabled": 1
                }
            },
            "postgresql": {
                "100": {
                    "checksum": "sha256:193af5fba661c32470026dbf229440236737a59efb53b0fabe2c9aba14c35ccc",
                    "enabled": 1
                }
            },
            "postgrey": {
                "100": {
                    "checksum": "sha256:f3beab7d301e925c9114fc16905d28eb713bc118d215abe2f17a3db1514ff93a",
                    "enabled": 1
                }
            },
            "ppp": {
                "100": {
                    "checksum": "sha256:8673b905d5b897c499e6911d91201e349af2666d906dbe2c1abc39c9f3a54116",
                    "enabled": 1
                }
            },
            "prelink": {
                "100": {
                    "checksum": "sha256:99583ebe5e11399512e284d9d4de0752a1a6832e629953072b9ee94bb3980c8f",
                    "enabled": 1
                }
            },
            "prelude": {
                "100": {
                    "checksum": "sha256:a061be8d0233c31d52544aef63959c56aa5c634818898f465d5717918d654266",
                    "enabled": 1
                }
            },
            "privoxy": {
                "100": {
                    "checksum": "sha256:95e34699603fb38d98bc4491202d783f96ad0d51dd80cf80fac65f45b6fc1a4c",
                    "enabled": 1
                }
            },
            "procmail": {
                "100": {
                    "checksum": "sha256:1fea11fb5b09a5956ca32654374d35ef281093f98cda7d0bc462d1b2a9cfcdd4",
                    "enabled": 1
                }
            },
            "prosody": {
                "100": {
                    "checksum": "sha256:e2b0a84c1151d1f51128b53a7f406701188ef5c8ceb18a733db4f62d58a19b98",
                    "enabled": 1
                }
            },
            "psad": {
                "100": {
                    "checksum": "sha256:ecfa830cf53375b2ea1c0fb0921f5adeb47a4471488765fa43e724d7f5e9a11f",
                    "enabled": 1
                }
            },
            "ptchown": {
                "100": {
                    "checksum": "sha256:870f119b4194e42aff2f71722fb1fb11868f88d3bd2f323eacbdefeea2a9ef4e",
                    "enabled": 1
                }
            },
            "publicfile": {
                "100": {
                    "checksum": "sha256:6056d698ab7914842d62ef8908402e481e02014fbcf03c984df01e768f30abf8",
                    "enabled": 1
                }
            },
            "pulseaudio": {
                "100": {
                    "checksum": "sha256:4a01f517ea0fd510aaac2e918afaef70e40175f2c4744d96bc1fd9647c915e1f",
                    "enabled": 1
                }
            },
            "puppet": {
                "100": {
                    "checksum": "sha256:35ea9020284c9fde1e544bb2b15698ea8b3ae46a3187539542ead64bf563020d",
                    "enabled": 1
                }
            },
            "pwauth": {
                "100": {
                    "checksum": "sha256:ef67d14c742393291981705da797a401a758833e7ab4f3a116cce7b662836761",
                    "enabled": 1
                }
            },
            "qatlib": {
                "100": {
                    "checksum": "sha256:3714e0a36d43f8667e80c187637d847425155bde011be321043371b15098e3c8",
                    "enabled": 1
                }
            },
            "qmail": {
                "100": {
                    "checksum": "sha256:e99893e32bdfbe81a09e2b01a27cf0ea8865e54e3b0fcb1563637a4ed59455b2",
                    "enabled": 1
                }
            },
            "qpid": {
                "100": {
                    "checksum": "sha256:cd5654f248ed789cc12534dac789b9c1d3d32d325dceedb4eb27afa2c6c61780",
                    "enabled": 1
                }
            },
            "quantum": {
                "100": {
                    "checksum": "sha256:aa4c8076bcd3d92db74d5e2394d885e6b10d729b86081f1683e349ac6da41794",
                    "enabled": 1
                }
            },
            "quota": {
                "100": {
                    "checksum": "sha256:f34ce67cab4573756019b1589e0e518c4933ef76887e0437f0ae582f6f703a9b",
                    "enabled": 1
                }
            },
            "rabbitmq": {
                "100": {
                    "checksum": "sha256:a759f5eba8608e6190b1649aeb7122d50de1b985878d9c1d5822bef5bc2b88e8",
                    "enabled": 1
                }
            },
            "radius": {
                "100": {
                    "checksum": "sha256:ad64588fda03fd0d8c6e9b7b4afa31b20472df41ee50b68b8e9f07d6878dcf81",
                    "enabled": 1
                }
            },
            "radvd": {
                "100": {
                    "checksum": "sha256:225787ffe39a022ba6c552cd389e2ddb613353c5ca65bbd572d67ccf7dbdef6b",
                    "enabled": 1
                }
            },
            "raid": {
                "100": {
                    "checksum": "sha256:0da1f5f76dcf060623ca3599040b0c03e5626b2624bd74d3502697ef1e11f387",
                    "enabled": 1
                }
            },
            "rasdaemon": {
                "100": {
                    "checksum": "sha256:bdb6f062284dd7b12282604373958122db4d18b262bfd844520e919ed2845e5a",
                    "enabled": 1
                }
            },
            "rdisc": {
                "100": {
                    "checksum": "sha256:4d86b4c1044e1a8766f6ac6ade0296aa461ef5550efae9aeabc99a5c946936f6",
                    "enabled": 1
                }
            },
            "readahead": {
                "100": {
                    "checksum": "sha256:43044ec71b5839b47dc5fa30b7d9ba500908143b64ad3b608775736a44b046d5",
                    "enabled": 1
                }
            },
            "realmd": {
                "100": {
                    "checksum": "sha256:bcad6a7d597f894f1985bf46623a62ac2cbeff634770542c93e40a5fd7be93a9",
                    "enabled": 1
                }
            },
            "redis": {
                "100": {
                    "checksum": "sha256:075a71c98c377420b53c7b584f5d963598d97e7e49f58eb67bf0a5be1b20a908",
                    "enabled": 1
                }
            },
            "remotelogin": {
                "100": {
                    "checksum": "sha256:e6f98b186ddfff611082d29031ae948dd23c737d7ff1d713760d1794906698ae",
                    "enabled": 1
                }
            },
            "restraint": {
                "400": {
                    "checksum": "sha256:95d0d03fbc1d4147f02a0b3da7cd76efbdd75d1f5812cf6160e922336abbf270",
                    "enabled": 1
                }
            },
            "rhcd": {
                "100": {
                    "checksum": "sha256:92774c1cc6fd16156001c4facda140bb33ddba4269198bd016bda6d92eac079e",
                    "enabled": 1
                }
            },
            "rhcs": {
                "100": {
                    "checksum": "sha256:9c65d7909e4c443dc490c9b58a6e6e5471e58b7e934d10f08359db09b5fc395e",
                    "enabled": 1
                }
            },
            "rhev": {
                "100": {
                    "checksum": "sha256:03591f21a98cba8e5f4c5272a799067eca3ae1520a02dd50c13a607a318dfcc1",
                    "enabled": 1
                }
            },
            "rhgb": {
                "100": {
                    "checksum": "sha256:9b0fc4d87d27875c84b7c21c3b99d0af2e52903b611cb360804fe9f50f9d6f7a",
                    "enabled": 1
                }
            },
            "rhnsd": {
                "100": {
                    "checksum": "sha256:271d37f30d3a338cc9bd6199a488d48a7c88068675c462df5071bca8c1f7c438",
                    "enabled": 1
                }
            },
            "rhsmcertd": {
                "100": {
                    "checksum": "sha256:82815996833cc475e386b8e94b87b1516dd876fccd5b2efd4c88ccc4a0854e6d",
                    "enabled": 1
                }
            },
            "rhts": {
                "400": {
                    "checksum": "sha256:39f15722a115b11064c20b8bc2758e8fe06531a8f923ea00a3e7079a0554e3d6",
                    "enabled": 1
                }
            },
            "ricci": {
                "100": {
                    "checksum": "sha256:308e6f81ea6fe3a196db021ad12cb7baae8bdd19f212bdc1f8ab404c27019abe",
                    "enabled": 1
                }
            },
            "rkhunter": {
                "100": {
                    "checksum": "sha256:aef52847742df6eecd94fe50a9fd5021637088620a576daa6659b9783b9d8553",
                    "enabled": 1
                }
            },
            "rkt": {
                "100": {
                    "checksum": "sha256:99e9cda55e22a71ebb3d74c56051f69ae895dd8134b627dcafda4b0a925e9ae9",
                    "enabled": 1
                }
            },
            "rlogin": {
                "100": {
                    "checksum": "sha256:6d7c850c1ee0942bd60c30a8f112b82fb182a24bc594d3707bf7801c4b80d5ad",
                    "enabled": 1
                }
            },
            "rngd": {
                "100": {
                    "checksum": "sha256:8b5d8041e76b9fdbad0d45ad1a37975171e424e56718dc139a93063729905cd5",
                    "enabled": 1
                }
            },
            "rolekit": {
                "100": {
                    "checksum": "sha256:01ae038a225e72270a6acc6bc6cc0b36c3b09a10e68112da9ec1b9d91fb414d5",
                    "enabled": 1
                }
            },
            "roundup": {
                "100": {
                    "checksum": "sha256:3b74654cbf5033ee6ab8c2dbc22773af846c129879c2b7355bc99df7c293833c",
                    "enabled": 1
                }
            },
            "rpc": {
                "100": {
                    "checksum": "sha256:c97daf9137ca479db4a7315e77f4475158475e674a12e1f42fa97f9db6cb398e",
                    "enabled": 1
                }
            },
            "rpcbind": {
                "100": {
                    "checksum": "sha256:a369faf5cb76fd3dd29929a38cd6b3221e7f98cb3c57675cfeeef9736b041283",
                    "enabled": 1
                }
            },
            "rpm": {
                "100": {
                    "checksum": "sha256:ededd2ec5ee4506eab2315599bf43a3deb8ceb83686c97406722968f5e93d759",
                    "enabled": 1
                }
            },
            "rrdcached": {
                "100": {
                    "checksum": "sha256:d542bd71ac70b65fbe712194a3727e826ac414096230de7bc5c4a2aea037756f",
                    "enabled": 1
                }
            },
            "rshd": {
                "100": {
                    "checksum": "sha256:d87f4f7f764a6282dccdfba116b34296f94f62e44c8ac2b51ae6ae7850be63e2",
                    "enabled": 1
                }
            },
            "rshim": {
                "100": {
                    "checksum": "sha256:0ddb10f8d7a2bcd92cc2e68302467326c643e02a5623151c3168135a3ec290de",
                    "enabled": 1
                }
            },
            "rssh": {
                "100": {
                    "checksum": "sha256:d5958076535790d5bad592f7eb70977ac9437bc0a7f97b34e431b9f414973648",
                    "enabled": 1
                }
            },
            "rsync": {
                "100": {
                    "checksum": "sha256:a0a5f9fa55b3888c84c566cce656011bc1ad8dab2c4b700ea6bf2341f556d590",
                    "enabled": 1
                }
            },
            "rtas": {
                "100": {
                    "checksum": "sha256:341b47d041b0df0aeadce1cd1a3cfa195aa9c5569d8f998edcd0169c13017894",
                    "enabled": 1
                }
            },
            "rtkit": {
                "100": {
                    "checksum": "sha256:5fab287cedea124b92aecc21550dafa4218805485040915716c00486d9cf04ca",
                    "enabled": 1
                }
            },
            "rwho": {
                "100": {
                    "checksum": "sha256:75872a4c3a9922ba6935d078f68aab2b562705085e258aeb7dd2bfc3e087615c",
                    "enabled": 1
                }
            },
            "samba": {
                "100": {
                    "checksum": "sha256:a2ea92601c02a22ffb4551f8bbf53174f7c363e07ebe198e74dafe3ebedd82d3",
                    "enabled": 1
                }
            },
            "sambagui": {
                "100": {
                    "checksum": "sha256:8ead836404a7e8a8b68aabeee3c649c214df9699b45f6c784989b3fcdd4f9e1a",
                    "enabled": 1
                }
            },
            "sandboxX": {
                "100": {
                    "checksum": "sha256:0776fe05eb5bcea62b434f30b893f79c06c7a18f352de24ed2546817f566c429",
                    "enabled": 1
                }
            },
            "sanlock": {
                "100": {
                    "checksum": "sha256:28c6186dc1bc711d42a3d1d2ff051038f0dd4a0259544e52f68c61139efd3a4e",
                    "enabled": 1
                }
            },
            "sap": {
                "100": {
                    "checksum": "sha256:1ba912e3e1e441a6b3f7b88c5603ff8ae915efdee90a76ae34e41d8556d851b0",
                    "enabled": 1
                }
            },
            "sasl": {
                "100": {
                    "checksum": "sha256:61fd6af55a226605d0ad608c145c6650ccb29b31d7ccf50e32b95ec7686c53b3",
                    "enabled": 1
                }
            },
            "sbd": {
                "100": {
                    "checksum": "sha256:f4a521054bd52ace05da7d520aabd132df773acf3037e2f414d81fe27f9ef04a",
                    "enabled": 1
                }
            },
            "sblim": {
                "100": {
                    "checksum": "sha256:9f9bcb3b8190d76c1381443107531fc17c78be637320f00310e4784a7ebc5c3a",
                    "enabled": 1
                }
            },
            "screen": {
                "100": {
                    "checksum": "sha256:39b8212d5ea27912138139bcaf7834e5038efe89b88c2166571ed2c249eb202a",
                    "enabled": 1
                }
            },
            "secadm": {
                "100": {
                    "checksum": "sha256:6816f59c4202654c15adac72d78c832d1cc9a559363bfd51e6a0345779fe8dea",
                    "enabled": 1
                }
            },
            "sectoolm": {
                "100": {
                    "checksum": "sha256:515aa85f67c92cfedced542c1222bd136f8fb51801166f807145141acf212736",
                    "enabled": 1
                }
            },
            "selinuxutil": {
                "100": {
                    "checksum": "sha256:428f92fd885af8659db055061639069b8be8ab2d2386beb612db65261aa12673",
                    "enabled": 1
                }
            },
            "sendmail": {
                "100": {
                    "checksum": "sha256:9ec599103477e82c5a8da5e47ae1056cc519ccb236e171f9acfcc748af47b679",
                    "enabled": 1
                }
            },
            "sensord": {
                "100": {
                    "checksum": "sha256:a422c0e74e5b98d753f582bc2adacd6be541580fdd4b226ccd9cd05ece2eba08",
                    "enabled": 1
                }
            },
            "setrans": {
                "100": {
                    "checksum": "sha256:96030787f55e1e8c4d76f22919ca8dcf17a16cd08de745aad5b7f740f14c1958",
                    "enabled": 1
                }
            },
            "setroubleshoot": {
                "100": {
                    "checksum": "sha256:20f38095965fcc5d0b454413085c7609588086942ae89a65d19d6e6e0a06a9ee",
                    "enabled": 1
                }
            },
            "seunshare": {
                "100": {
                    "checksum": "sha256:fc381f26f9cbc83f72d5063e4d028a5365401a202052012755fa49ea63a51a42",
                    "enabled": 1
                }
            },
            "sge": {
                "100": {
                    "checksum": "sha256:3ff3ca366bd0571039f42bf990acbe10aed992be87c89450536e2fdc0e31961a",
                    "enabled": 1
                }
            },
            "shorewall": {
                "100": {
                    "checksum": "sha256:19e4f50a756989bafc3d30aa2679b9730c5a297e1aa20f71425f024fe934c574",
                    "enabled": 1
                }
            },
            "slocate": {
                "100": {
                    "checksum": "sha256:15789a6c47c2eba6ef224a7f4464819b37ed76cc6d7efadc7b1be0f212c85046",
                    "enabled": 1
                }
            },
            "slpd": {
                "100": {
                    "checksum": "sha256:404e3e22459d7cb94c12408a3bc9d320f58eee24788ac11648318722d1bc0bee",
                    "enabled": 1
                }
            },
            "smartmon": {
                "100": {
                    "checksum": "sha256:628fc3630bfcc5437ffbe528c3c4c0d7a08130b7b01cb1a66cd630bf05eb8795",
                    "enabled": 1
                }
            },
            "smokeping": {
                "100": {
                    "checksum": "sha256:f05ecf227e69b096900cc9fcd863a6d5457d64d1c0c41b9b1fc9aac20d02160d",
                    "enabled": 1
                }
            },
            "smoltclient": {
                "100": {
                    "checksum": "sha256:92f62e04b6f14736d375aae3c22f2da5edec288a6997212d194e062501a7128b",
                    "enabled": 1
                }
            },
            "smsd": {
                "100": {
                    "checksum": "sha256:006443b6c33a37037fdc4dc689bbfc7695251a2766429859137555797652aa33",
                    "enabled": 1
                }
            },
            "snapper": {
                "100": {
                    "checksum": "sha256:2af2c59f061d181581c0ee972630cac466f74d873731de2aa4a27dd0b9fdad9b",
                    "enabled": 1
                }
            },
            "snmp": {
                "100": {
                    "checksum": "sha256:c9ef0cdfb822eba65e29c8bd6594ad8cf9bc5a7cdc3aeef553475c7127619d4b",
                    "enabled": 1
                }
            },
            "snort": {
                "100": {
                    "checksum": "sha256:4068e4127dc3f2252006ed676a6c27c3ee34df690139c8d5c55813ea30e7ceed",
                    "enabled": 1
                }
            },
            "sosreport": {
                "100": {
                    "checksum": "sha256:25fdb658f00c0a3bc753a69bfb58d2f054903e7000ad0c7788c3eb712d79bac6",
                    "enabled": 1
                }
            },
            "soundserver": {
                "100": {
                    "checksum": "sha256:a8a0fa6265d7b4b17243ff1fca6f0ba49135e12d0ded004bb7c515291f30641b",
                    "enabled": 1
                }
            },
            "spamassassin": {
                "100": {
                    "checksum": "sha256:6a40d1bd53affea088d732dfa1cd97b0dbb30d88a5667ccf25c148942e3153b5",
                    "enabled": 1
                }
            },
            "speech-dispatcher": {
                "100": {
                    "checksum": "sha256:f23e5facc957ee792b529612c165040e87df4a7b49dc09c7887d2c5e6e4d7e41",
                    "enabled": 1
                }
            },
            "squid": {
                "100": {
                    "checksum": "sha256:68b2c3f88c1457ed4474f6ebbc85329d444acf473b25b0c505d58ee338399176",
                    "enabled": 1
                }
            },
            "ssh": {
                "100": {
                    "checksum": "sha256:309d6aa526bdbffec6b49778a7d4f369cfad582b78e54e1a2bb734e0e555fd16",
                    "enabled": 1
                }
            },
            "sslh": {
                "100": {
                    "checksum": "sha256:008b343789eb4b2aef06e0eace24fb651fe60e8851b9f86bf5aa8b5e6eba8092",
                    "enabled": 1
                }
            },
            "sssd": {
                "100": {
                    "checksum": "sha256:3e002e9f28e23f909ff86d1fbd93f16f5fa23e4803988f99eab78fcb5ea968c2",
                    "enabled": 1
                }
            },
            "staff": {
                "100": {
                    "checksum": "sha256:b63340340998907e1fb3fb8cc69f3b7586ce8b573f5cd7bf45b57234f96141bd",
                    "enabled": 1
                }
            },
            "stalld": {
                "100": {
                    "checksum": "sha256:9d57f2a8aab33054bd5a1425ab0f76cdc5b983aac1df3353e7945b21e204667b",
                    "enabled": 1
                }
            },
            "stapserver": {
                "100": {
                    "checksum": "sha256:67d0f2920bcead63390df082d6e187d9317e6ac0330fbd2f40cc29b054845b16",
                    "enabled": 1
                }
            },
            "stratisd": {
                "100": {
                    "checksum": "sha256:7581441e9dd2586371712d1c173061e6e3c0085e8654c97940963b168a7ea3ef",
                    "enabled": 1
                }
            },
            "stunnel": {
                "100": {
                    "checksum": "sha256:b6dbad9b252aec8e5a939b3e08454119cbea8725c83e410c260d6a06a388cd8f",
                    "enabled": 1
                }
            },
            "su": {
                "100": {
                    "checksum": "sha256:857ff8acea720516985a8942d946c75b39ab4150b8983cdc0ba3ed99c82d1885",
                    "enabled": 1
                }
            },
            "sudo": {
                "100": {
                    "checksum": "sha256:9877c22302189668e7a948cfda4273f7455d6f2ecec0bb6e1f3ffb2a217dc9d0",
                    "enabled": 1
                }
            },
            "svnserve": {
                "100": {
                    "checksum": "sha256:bb2418014f2be4d6b143586490bba8f56ee56d0b6a938b795118fa204f829016",
                    "enabled": 1
                }
            },
            "swift": {
                "100": {
                    "checksum": "sha256:8ffa1ab3ff17d35c16b701936fb37a4a1f398c801c51310cc084cebc0acf4f7c",
                    "enabled": 1
                }
            },
            "sysadm": {
                "100": {
                    "checksum": "sha256:91f18d5ce94b0b9ad4cd41e3b1eb1f5f3b1c3dcf3f52c25d5f49db29512ac906",
                    "enabled": 1
                }
            },
            "sysadm_secadm": {
                "100": {
                    "checksum": "sha256:26c80707a4a08f665d249670d9329eda36af31bdbb084d49e72198221090b7b8",
                    "enabled": 1
                }
            },
            "sysnetwork": {
                "100": {
                    "checksum": "sha256:3d4405968a8c9095460ab7e66da67f4e1168eb7194d630559aa58b78bdb25135",
                    "enabled": 1
                }
            },
            "sysstat": {
                "100": {
                    "checksum": "sha256:b767e9a66c473f2e3bb06336f0119a220bf08ef4380333c16b19acd05fb40f6d",
                    "enabled": 1
                }
            },
            "systemd": {
                "100": {
                    "checksum": "sha256:21181339b1f50691a6e73a96e9000604788b3b41e7a143fe3f788d4f0f576c99",
                    "enabled": 1
                }
            },
            "tangd": {
                "100": {
                    "checksum": "sha256:8be8dbcf7392331a36d09547dc7a136f37b21e7eefacc7a849dd3ea29a8c2f1a",
                    "enabled": 1
                }
            },
            "targetd": {
                "100": {
                    "checksum": "sha256:61a7d4ed0bbad560b1507fb13a27cb80f0ba9177e188369ee0388e208ad9a86e",
                    "enabled": 1
                }
            },
            "tcpd": {
                "100": {
                    "checksum": "sha256:f90f0a080b7822408e608f8ace404c2b3a7cce145fafda16d16c39c90412dd7b",
                    "enabled": 1
                }
            },
            "tcsd": {
                "100": {
                    "checksum": "sha256:e876f3f8dc496189fa7344870ca42edfd421dda506dcaaeaac67881f1c4a9f13",
                    "enabled": 1
                }
            },
            "telepathy": {
                "100": {
                    "checksum": "sha256:28d44df10f515e1f265d7bbdf51a377351efaaf2831b6bcc1ced928a358436ac",
                    "enabled": 1
                }
            },
            "telnet": {
                "100": {
                    "checksum": "sha256:7da89bf5c9cf25955b4516e9fa763239b7bb23084aad6a04c1933fd50cf315e9",
                    "enabled": 1
                }
            },
            "tftp": {
                "100": {
                    "checksum": "sha256:05185332feaed95a99162df1b3dc2bfb37c879d514db78015c4c083ca57028fd",
                    "enabled": 1
                }
            },
            "tgtd": {
                "100": {
                    "checksum": "sha256:178828a7d7ff1c30a3df812905214bce9de2ca0744dbe4dfe01691deb01d629e",
                    "enabled": 1
                }
            },
            "thin": {
                "100": {
                    "checksum": "sha256:2a20802eaae04dc1cf81bd9149ebd4db8e88eec3388b2d50bf078826d24994f0",
                    "enabled": 1
                }
            },
            "thumb": {
                "100": {
                    "checksum": "sha256:dffaeb8dd84c75f302c5cab90c3e892c5f6e6efa26167a9f8efe949ac613f99d",
                    "enabled": 1
                }
            },
            "timedatex": {
                "100": {
                    "checksum": "sha256:f29a6896ff0b88c513fe7998390aae2ebe6c1d6f5785e10c6ca33c6411a8bfdf",
                    "enabled": 1
                }
            },
            "tlp": {
                "100": {
                    "checksum": "sha256:9fd7f4b68daa0c6c8597e029f0a269e89731c2169d3f5d296062ca682d4e3ebf",
                    "enabled": 1
                }
            },
            "tmpreaper": {
                "100": {
                    "checksum": "sha256:9c14537b3fc129bf5e108017c16a8b5175f1c8a50ccb9d578bedb620e0e32503",
                    "enabled": 1
                }
            },
            "tomcat": {
                "100": {
                    "checksum": "sha256:48d267290c8bb1bb35a48bede09286690dde4c2991be32256776137569f6c586",
                    "enabled": 1
                }
            },
            "tor": {
                "100": {
                    "checksum": "sha256:18a978369799a6144a4dd7e0cab365a6c200218e64b453a70dd501613de3e379",
                    "enabled": 1
                }
            },
            "tuned": {
                "100": {
                    "checksum": "sha256:6455edecd5a99a3f6a3d28887408b6e4a65532965072d0733ba271e66712345b",
                    "enabled": 1
                }
            },
            "tvtime": {
                "100": {
                    "checksum": "sha256:80d528807e7b8e942a801bdfab3fc402dbc09163536462ccd8a678dcfba4725c",
                    "enabled": 1
                }
            },
            "udev": {
                "100": {
                    "checksum": "sha256:7f90ada00012cf15d357503034f98494f56c9f1e6bb82249e4a2092fe0991be5",
                    "enabled": 1
                }
            },
            "ulogd": {
                "100": {
                    "checksum": "sha256:cd675dd586c7bf94418a5fdddc2f87502970ec19911356ec1d628b109a339dcf",
                    "enabled": 1
                }
            },
            "uml": {
                "100": {
                    "checksum": "sha256:612f53a60d4b66f641a379ce5f96d27fe6214edf138a579be295d4fcabf28b94",
                    "enabled": 1
                }
            },
            "unconfined": {
                "100": {
                    "checksum": "sha256:a1fbd41ce1ac921d18a97dbcb741ce9a1cdd7fe6867c676400c648f713603052",
                    "enabled": 1
                }
            },
            "unconfineduser": {
                "100": {
                    "checksum": "sha256:b52c925f59435ee09feb687d1eedf5b5e1c646dc027036efa82cbc3ccb7eb93e",
                    "enabled": 1
                }
            },
            "unlabelednet": {
                "100": {
                    "checksum": "sha256:8674a308bc39857ae30458a7ffe592c50fa6c61ed9efbda30016b9f5fcc55cb8",
                    "enabled": 1
                }
            },
            "unprivuser": {
                "100": {
                    "checksum": "sha256:4a64228f847c36aa2591fc3818893cd964730a7971bd87ed50b0e382ee83d881",
                    "enabled": 1
                }
            },
            "updfstab": {
                "100": {
                    "checksum": "sha256:81736986d7599acaab02dd9a6512d81ea7f59476edccbe71b3ef32da493f5cd2",
                    "enabled": 1
                }
            },
            "usbmodules": {
                "100": {
                    "checksum": "sha256:77c24ef9ae5b3e19e29054146e0b09b7fed37fb70aff8e9d53e85e262050d22a",
                    "enabled": 1
                }
            },
            "usbmuxd": {
                "100": {
                    "checksum": "sha256:025da4e2c2b5788f98226701462c27cefe41ce6abb617c699a29f5568678dae7",
                    "enabled": 1
                }
            },
            "userdomain": {
                "100": {
                    "checksum": "sha256:7762b92e8c8abf9e77a3cc23071aa6ce461d57fc4c629858f23195fb7a74d789",
                    "enabled": 1
                }
            },
            "userhelper": {
                "100": {
                    "checksum": "sha256:7bf185d5031851326d2d108c0d70d9653b1dc80d77f9d7f58d66250df509245f",
                    "enabled": 1
                }
            },
            "usermanage": {
                "100": {
                    "checksum": "sha256:f69db7e7153bd486110c500cb4817d274c159179afc4aa01eb1b86af39f01483",
                    "enabled": 1
                }
            },
            "usernetctl": {
                "100": {
                    "checksum": "sha256:8f0c31f0713ab3e13fcea14186a5b0e749791dd7de8967efe21b6b427d1e0378",
                    "enabled": 1
                }
            },
            "uucp": {
                "100": {
                    "checksum": "sha256:2eb5e03e9e4a0a294e97adcaa6e188881917f403561330d4c585f24c0e8801f0",
                    "enabled": 1
                }
            },
            "uuidd": {
                "100": {
                    "checksum": "sha256:2348ef1729d8e40d4287325736452cce1c24fcdf5af11df06d315e6a3ac6acf6",
                    "enabled": 1
                }
            },
            "varnishd": {
                "100": {
                    "checksum": "sha256:ecfe0c70380e1129186a81bc8778fe5904c88fd9c616ad69851241dd57d7fd78",
                    "enabled": 1
                }
            },
            "vdagent": {
                "100": {
                    "checksum": "sha256:8c8faf892551960dce6c61dbc2145ee637921a7ff62cc96328978d9946024ebd",
                    "enabled": 1
                }
            },
            "vhostmd": {
                "100": {
                    "checksum": "sha256:04f9af430360595438f29cdba50fe3ce96fbe3a59cd1396d168cf3be57469538",
                    "enabled": 1
                }
            },
            "virt": {
                "100": {
                    "checksum": "sha256:d94f6df278181c096f0e7a90cbee1dc1ab07bd37b3d3577997f40eca5c3df8fd",
                    "enabled": 1
                }
            },
            "vlock": {
                "100": {
                    "checksum": "sha256:e213b634c2093ebf894adad9287c670dd73e43c79538c1e11d6b0cff574fd23d",
                    "enabled": 1
                }
            },
            "vmtools": {
                "100": {
                    "checksum": "sha256:9d2650b10280e0ecbdbb20692515598049e290ebfb426eafa5c0b067a2d33bf1",
                    "enabled": 1
                }
            },
            "vmware": {
                "100": {
                    "checksum": "sha256:e1a096c9fa1aa6c4244c3cf8340c14a67ba60ade122a7bb5167604c4cdc2e341",
                    "enabled": 1
                }
            },
            "vnstatd": {
                "100": {
                    "checksum": "sha256:03ccc49fc408c718f89b93502f1a7073efc8d9f81d18bcb69cede46300340130",
                    "enabled": 1
                }
            },
            "vpn": {
                "100": {
                    "checksum": "sha256:71544fa054595557124ab7098947a966e33b98584eb1345f955e754be531038e",
                    "enabled": 1
                }
            },
            "w3c": {
                "100": {
                    "checksum": "sha256:57482e874ec657d4d2a5840a4de5524df24b14e974e1a19e42bffc12428a5eca",
                    "enabled": 1
                }
            },
            "watchdog": {
                "100": {
                    "checksum": "sha256:81379d3f5bd7462ef59a44e4e6bbc5d5261fc8633be7a7ed9da248b7ed47700a",
                    "enabled": 1
                }
            },
            "wdmd": {
                "100": {
                    "checksum": "sha256:f0cec2f5898981b4e2768802facb4eee17c42e46d8da58b20467d7fd5ba0ed3b",
                    "enabled": 1
                }
            },
            "webadm": {
                "100": {
                    "checksum": "sha256:994f8fb7a32079d30b68a1cc4c51fe6a10e425c6145a689e32ac0053a2ded7c4",
                    "enabled": 1
                }
            },
            "webalizer": {
                "100": {
                    "checksum": "sha256:ec56ec85299e7b46853f9d34abae5c56aba7244054e48ac40cb4cf6dee602dc1",
                    "enabled": 1
                }
            },
            "wine": {
                "100": {
                    "checksum": "sha256:2750d4c101cacb336bb717f3beba2e0967ce6d957609f647e6f92966536894c6",
                    "enabled": 1
                }
            },
            "wireguard": {
                "100": {
                    "checksum": "sha256:c039f714e791b72444890960412088848de91a23b909cab26386369f6fa55b67",
                    "enabled": 1
                }
            },
            "wireshark": {
                "100": {
                    "checksum": "sha256:60940e6f75a00c73fd15eea03eb69aa8151b3457020034910aa0a2b714cc2241",
                    "enabled": 1
                }
            },
            "xen": {
                "100": {
                    "checksum": "sha256:92dfa4c75176cfa571f1310629ba5befe920f6cbbec03144aa23a87a1e27e2f3",
                    "enabled": 1
                }
            },
            "xguest": {
                "100": {
                    "checksum": "sha256:1862f5d7cfb921ef8b6adf56afd19db431a8f1aca74a38f46a543b83c0d02ac7",
                    "enabled": 1
                }
            },
            "xserver": {
                "100": {
                    "checksum": "sha256:ce38e8a07ff5e1061cc51311f31a91233e02fd23d57ee21b9977c02ae6e361c1",
                    "enabled": 1
                }
            },
            "zabbix": {
                "100": {
                    "checksum": "sha256:bf751940e6d5acd7944addf8099e67e309b367b70a5ffba89a437301b7251619",
                    "enabled": 1
                }
            },
            "zarafa": {
                "100": {
                    "checksum": "sha256:cf760718fd967208648f32ae3528e9d42e7e2933487d3052bd5809474fc577ec",
                    "enabled": 1
                }
            },
            "zebra": {
                "100": {
                    "checksum": "sha256:bba64baddec0addb05ac038c6a89a5a0fab53e43d797d8353a8a209cd66ca4e2",
                    "enabled": 1
                }
            },
            "zoneminder": {
                "100": {
                    "checksum": "sha256:4e20f61fbbe4afebaa084066e9e4c445c6d7d36e1254642bef5315313333ad40",
                    "enabled": 1
                }
            },
            "zosremote": {
                "100": {
                    "checksum": "sha256:d0b7718e1270a44a288569b8a2f8c0a4931ea45a4c4e04b6264e0ff4f7752283",
                    "enabled": 1
                }
            }
        },
        "selinux_priorities": true
    },
    "changed": false
}
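
The long module listing above is the role's gathered SELinux facts. The same information can be queried on the host directly with semanage; a minimal read-only sketch (the task name and register variable are illustrative, not from this run):

    - name: List installed SELinux modules (mirrors the facts dump above)
      ansible.builtin.command: semanage module -l
      register: semanage_modules   # illustrative variable name
      changed_when: false          # pure query, never reports a change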

TASK [fedora.linux_system_roles.selinux : Load SELinux modules] ****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:115
Saturday 02 November 2024  19:01:05 -0400 (0:00:03.752)       0:07:36.250 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "selinux_modules is defined",
    "skip_reason": "Conditional result was False"
}
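
The task is skipped because this test never defines selinux_modules. For reference, a hypothetical invocation that would exercise it could look like the following; the entry keys are an assumption based on the role's documented interface, only the variable name comes from the log:

    - name: Apply a local SELinux module via the role (hypothetical)
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.selinux
      vars:
        selinux_modules:
          - path: my_local_module.pp   # hypothetical policy package file
            state: enabled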

TASK [fedora.linux_system_roles.selinux : Restore SELinux labels on filesystem tree] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:128
Saturday 02 November 2024  19:01:05 -0400 (0:00:00.186)       0:07:36.436 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.selinux : Restore SELinux labels on filesystem tree in check mode] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:136
Saturday 02 November 2024  19:01:05 -0400 (0:00:00.090)       0:07:36.527 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.nbde_server : Stat the tangd custom port systemd directory] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:14
Saturday 02 November 2024  19:01:05 -0400 (0:00:00.182)       0:07:36.709 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [fedora.linux_system_roles.nbde_server : Get a list of files in the tangd custom directory] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:19
Saturday 02 November 2024  19:01:06 -0400 (0:00:00.527)       0:07:37.236 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__nbde_server_tangd_dir_stat.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.nbde_server : Manage tangd custom port systemd directory] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:35
Saturday 02 November 2024  19:01:06 -0400 (0:00:00.250)       0:07:37.486 ***** 
changed: [managed-node2] => {
    "changed": true,
    "gid": 0,
    "group": "root",
    "mode": "0755",
    "owner": "root",
    "path": "/etc/systemd/system/tangd.socket.d",
    "secontext": "unconfined_u:object_r:systemd_unit_file_t:s0",
    "size": 6,
    "state": "directory",
    "uid": 0
}
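
The changed result above corresponds to a directory task along these lines, reconstructed from the reported path, owner, group, and mode (a sketch, not the role's literal task):

    - name: Ensure the tangd socket drop-in directory exists (sketch)
      ansible.builtin.file:
        path: /etc/systemd/system/tangd.socket.d
        state: directory
        owner: root
        group: root
        mode: "0755"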


TASK [fedora.linux_system_roles.nbde_server : Create the file with the port entry that we want tangd to listen on] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:44
Saturday 02 November 2024  19:01:07 -0400 (0:00:00.723)       0:07:38.210 ***** 
changed: [managed-node2] => {
    "changed": true,
    "checksum": "cab519df8c21e60fd06ac780e2c7bd41ad441042",
    "dest": "/etc/systemd/system/tangd.socket.d/override.conf",
    "gid": 0,
    "group": "root",
    "md5sum": "fc727969e0bd264a9cc7f9c6bc56714c",
    "mode": "0644",
    "owner": "root",
    "secontext": "system_u:object_r:tangd_unit_file_t:s0",
    "size": 90,
    "src": "/root/.ansible/tmp/ansible-tmp-1730588467.4519415-104109-12976438082210/.source.conf",
    "state": "file",
    "uid": 0
}
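
This drop-in is what points tangd.socket at the custom port (7500, as both the firewall task and the Listen value later in this log confirm). The role's actual template is not shown here; a minimal sketch of an equivalent task, with the file content assumed from the standard socket-override pattern:

    - name: Write the tangd.socket port override (sketch; content assumed)
      ansible.builtin.copy:
        dest: /etc/systemd/system/tangd.socket.d/override.conf
        mode: "0644"
        content: |
          # Clear the default port, then listen on the custom one
          [Socket]
          ListenStream=
          ListenStream=7500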

TASK [fedora.linux_system_roles.nbde_server : Set flag to tell main that the port has changed] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:53
Saturday 02 November 2024  19:01:08 -0400 (0:00:01.197)       0:07:39.407 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "__nbde_server_port_changed": true
    },
    "changed": false
}

TASK [Ensure the desired port is added to firewalld] ***************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:57
Saturday 02 November 2024  19:01:08 -0400 (0:00:00.111)       0:07:39.518 ***** 
included: fedora.linux_system_roles.firewall for managed-node2

TASK [fedora.linux_system_roles.firewall : Setup firewalld] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2
Saturday 02 November 2024  19:01:08 -0400 (0:00:00.339)       0:07:39.858 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node2

TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2
Saturday 02 November 2024  19:01:09 -0400 (0:00:00.248)       0:07:40.106 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__firewall_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Check if system is ostree] **********
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10
Saturday 02 November 2024  19:01:09 -0400 (0:00:00.216)       0:07:40.323 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15
Saturday 02 November 2024  19:01:10 -0400 (0:00:00.642)       0:07:40.965 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "__firewall_is_ostree": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22
Saturday 02 November 2024  19:01:10 -0400 (0:00:00.098)       0:07:41.064 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:27
Saturday 02 November 2024  19:01:10 -0400 (0:00:00.474)       0:07:41.539 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "__firewall_is_transactional": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.firewall : Install firewalld] ******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31
Saturday 02 November 2024  19:01:10 -0400 (0:00:00.126)       0:07:41.665 ***** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: firewalld

TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:43
Saturday 02 November 2024  19:01:12 -0400 (0:00:01.463)       0:07:43.128 ***** 
skipping: [managed-node2] => {
    "false_condition": "__firewall_is_transactional | d(false)"
}

TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:48
Saturday 02 November 2024  19:01:12 -0400 (0:00:00.116)       0:07:43.245 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__firewall_is_transactional | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:53
Saturday 02 November 2024  19:01:12 -0400 (0:00:00.114)       0:07:43.360 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__firewall_is_transactional | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Collect service facts] **************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5
Saturday 02 November 2024  19:01:12 -0400 (0:00:00.116)       0:07:43.476 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "firewall_disable_conflicting_services | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:9
Saturday 02 November 2024  19:01:12 -0400 (0:00:00.123)       0:07:43.600 ***** 
skipping: [managed-node2] => (item=nftables)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "firewall_disable_conflicting_services | bool",
    "item": "nftables",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => (item=iptables)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "firewall_disable_conflicting_services | bool",
    "item": "iptables",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => (item=ufw)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "firewall_disable_conflicting_services | bool",
    "item": "ufw",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] ***********
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:22
Saturday 02 November 2024  19:01:13 -0400 (0:00:00.325)       0:07:43.926 ***** 
ok: [managed-node2] => {
    "changed": false,
    "name": "firewalld",
    "status": {
        "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0",
        "ActiveEnterTimestampMonotonic": "0",
        "ActiveExitTimestampMonotonic": "0",
        "ActiveState": "inactive",
        "After": "basic.target dbus-broker.service dbus.socket polkit.service sysinit.target system.slice",
        "AllowIsolate": "no",
        "AssertResult": "no",
        "AssertTimestampMonotonic": "0",
        "Before": "shutdown.target network-pre.target",
        "BlockIOAccounting": "no",
        "BlockIOWeight": "[not set]",
        "BusName": "org.fedoraproject.FirewallD1",
        "CPUAccounting": "yes",
        "CPUAffinityFromNUMA": "no",
        "CPUQuotaPerSecUSec": "infinity",
        "CPUQuotaPeriodUSec": "infinity",
        "CPUSchedulingPolicy": "0",
        "CPUSchedulingPriority": "0",
        "CPUSchedulingResetOnFork": "no",
        "CPUShares": "[not set]",
        "CPUUsageNSec": "[not set]",
        "CPUWeight": "[not set]",
        "CacheDirectoryMode": "0755",
        "CanFreeze": "yes",
        "CanIsolate": "no",
        "CanReload": "yes",
        "CanStart": "yes",
        "CanStop": "yes",
        "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
        "CleanResult": "success",
        "CollectMode": "inactive",
        "ConditionResult": "no",
        "ConditionTimestampMonotonic": "0",
        "ConfigurationDirectoryMode": "0755",
        "Conflicts": "ip6tables.service ipset.service shutdown.target ebtables.service iptables.service",
        "ControlGroupId": "0",
        "ControlPID": "0",
        "CoredumpFilter": "0x33",
        "DefaultDependencies": "yes",
        "DefaultMemoryLow": "0",
        "DefaultMemoryMin": "0",
        "Delegate": "no",
        "Description": "firewalld - dynamic firewall daemon",
        "DevicePolicy": "auto",
        "Documentation": "\"man:firewalld(1)\"",
        "DynamicUser": "no",
        "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)",
        "ExecMainCode": "0",
        "ExecMainExitTimestampMonotonic": "0",
        "ExecMainPID": "0",
        "ExecMainStartTimestampMonotonic": "0",
        "ExecMainStatus": "0",
        "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExitType": "main",
        "FailureAction": "none",
        "FileDescriptorStoreMax": "0",
        "FinalKillSignal": "9",
        "FragmentPath": "/usr/lib/systemd/system/firewalld.service",
        "FreezerState": "running",
        "GID": "[not set]",
        "GuessMainPID": "yes",
        "IOAccounting": "no",
        "IOReadBytes": "18446744073709551615",
        "IOReadOperations": "18446744073709551615",
        "IOSchedulingClass": "2",
        "IOSchedulingPriority": "4",
        "IOWeight": "[not set]",
        "IOWriteBytes": "18446744073709551615",
        "IOWriteOperations": "18446744073709551615",
        "IPAccounting": "no",
        "IPEgressBytes": "[no data]",
        "IPEgressPackets": "[no data]",
        "IPIngressBytes": "[no data]",
        "IPIngressPackets": "[no data]",
        "Id": "firewalld.service",
        "IgnoreOnIsolate": "no",
        "IgnoreSIGPIPE": "yes",
        "InactiveEnterTimestampMonotonic": "0",
        "InactiveExitTimestampMonotonic": "0",
        "JobRunningTimeoutUSec": "infinity",
        "JobTimeoutAction": "none",
        "JobTimeoutUSec": "infinity",
        "KeyringMode": "private",
        "KillMode": "mixed",
        "KillSignal": "15",
        "LimitAS": "infinity",
        "LimitASSoft": "infinity",
        "LimitCORE": "infinity",
        "LimitCORESoft": "infinity",
        "LimitCPU": "infinity",
        "LimitCPUSoft": "infinity",
        "LimitDATA": "infinity",
        "LimitDATASoft": "infinity",
        "LimitFSIZE": "infinity",
        "LimitFSIZESoft": "infinity",
        "LimitLOCKS": "infinity",
        "LimitLOCKSSoft": "infinity",
        "LimitMEMLOCK": "8388608",
        "LimitMEMLOCKSoft": "8388608",
        "LimitMSGQUEUE": "819200",
        "LimitMSGQUEUESoft": "819200",
        "LimitNICE": "0",
        "LimitNICESoft": "0",
        "LimitNOFILE": "524288",
        "LimitNOFILESoft": "1024",
        "LimitNPROC": "13955",
        "LimitNPROCSoft": "13955",
        "LimitRSS": "infinity",
        "LimitRSSSoft": "infinity",
        "LimitRTPRIO": "0",
        "LimitRTPRIOSoft": "0",
        "LimitRTTIME": "infinity",
        "LimitRTTIMESoft": "infinity",
        "LimitSIGPENDING": "13955",
        "LimitSIGPENDINGSoft": "13955",
        "LimitSTACK": "infinity",
        "LimitSTACKSoft": "8388608",
        "LoadState": "loaded",
        "LockPersonality": "no",
        "LogLevelMax": "-1",
        "LogRateLimitBurst": "0",
        "LogRateLimitIntervalUSec": "0",
        "LogsDirectoryMode": "0755",
        "MainPID": "0",
        "ManagedOOMMemoryPressure": "auto",
        "ManagedOOMMemoryPressureLimit": "0",
        "ManagedOOMPreference": "none",
        "ManagedOOMSwap": "auto",
        "MemoryAccounting": "yes",
        "MemoryAvailable": "infinity",
        "MemoryCurrent": "[not set]",
        "MemoryDenyWriteExecute": "no",
        "MemoryHigh": "infinity",
        "MemoryLimit": "infinity",
        "MemoryLow": "0",
        "MemoryMax": "infinity",
        "MemoryMin": "0",
        "MemorySwapMax": "infinity",
        "MountAPIVFS": "no",
        "NFileDescriptorStore": "0",
        "NRestarts": "0",
        "NUMAPolicy": "n/a",
        "Names": "firewalld.service",
        "NeedDaemonReload": "no",
        "Nice": "0",
        "NoNewPrivileges": "no",
        "NonBlocking": "no",
        "NotifyAccess": "none",
        "OOMPolicy": "stop",
        "OOMScoreAdjust": "0",
        "OnFailureJobMode": "replace",
        "OnSuccessJobMode": "fail",
        "Perpetual": "no",
        "PrivateDevices": "no",
        "PrivateIPC": "no",
        "PrivateMounts": "no",
        "PrivateNetwork": "no",
        "PrivateTmp": "no",
        "PrivateUsers": "no",
        "ProcSubset": "all",
        "ProtectClock": "no",
        "ProtectControlGroups": "no",
        "ProtectHome": "no",
        "ProtectHostname": "no",
        "ProtectKernelLogs": "no",
        "ProtectKernelModules": "no",
        "ProtectKernelTunables": "no",
        "ProtectProc": "default",
        "ProtectSystem": "no",
        "RefuseManualStart": "no",
        "RefuseManualStop": "no",
        "ReloadResult": "success",
        "ReloadSignal": "1",
        "RemainAfterExit": "no",
        "RemoveIPC": "no",
        "Requires": "sysinit.target dbus.socket system.slice",
        "Restart": "no",
        "RestartKillSignal": "15",
        "RestartUSec": "100ms",
        "RestrictNamespaces": "no",
        "RestrictRealtime": "no",
        "RestrictSUIDSGID": "no",
        "Result": "success",
        "RootDirectoryStartOnly": "no",
        "RuntimeDirectoryMode": "0755",
        "RuntimeDirectoryPreserve": "no",
        "RuntimeMaxUSec": "infinity",
        "RuntimeRandomizedExtraUSec": "0",
        "SameProcessGroup": "no",
        "SecureBits": "0",
        "SendSIGHUP": "no",
        "SendSIGKILL": "yes",
        "Slice": "system.slice",
        "StandardError": "null",
        "StandardInput": "null",
        "StandardOutput": "null",
        "StartLimitAction": "none",
        "StartLimitBurst": "5",
        "StartLimitIntervalUSec": "10s",
        "StartupBlockIOWeight": "[not set]",
        "StartupCPUShares": "[not set]",
        "StartupCPUWeight": "[not set]",
        "StartupIOWeight": "[not set]",
        "StateChangeTimestampMonotonic": "0",
        "StateDirectoryMode": "0755",
        "StatusErrno": "0",
        "StopWhenUnneeded": "no",
        "SubState": "dead",
        "SuccessAction": "none",
        "SyslogFacility": "3",
        "SyslogLevel": "6",
        "SyslogLevelPrefix": "yes",
        "SyslogPriority": "30",
        "SystemCallErrorNumber": "2147483646",
        "TTYReset": "no",
        "TTYVHangup": "no",
        "TTYVTDisallocate": "no",
        "TasksAccounting": "yes",
        "TasksCurrent": "[not set]",
        "TasksMax": "22328",
        "TimeoutAbortUSec": "1min 30s",
        "TimeoutCleanUSec": "infinity",
        "TimeoutStartFailureMode": "terminate",
        "TimeoutStartUSec": "1min 30s",
        "TimeoutStopFailureMode": "terminate",
        "TimeoutStopUSec": "1min 30s",
        "TimerSlackNSec": "50000",
        "Transient": "no",
        "Type": "dbus",
        "UID": "[not set]",
        "UMask": "0022",
        "UnitFilePreset": "enabled",
        "UnitFileState": "disabled",
        "UtmpMode": "init",
        "Wants": "network-pre.target",
        "WatchdogSignal": "6",
        "WatchdogTimestampMonotonic": "0",
        "WatchdogUSec": "infinity"
    }
}

TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28
Saturday 02 November 2024  19:01:13 -0400 (0:00:00.755)       0:07:44.681 ***** 
changed: [managed-node2] => {
    "changed": true,
    "enabled": true,
    "name": "firewalld",
    "state": "started",
    "status": {
        "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0",
        "ActiveEnterTimestampMonotonic": "0",
        "ActiveExitTimestampMonotonic": "0",
        "ActiveState": "inactive",
        "After": "sysinit.target dbus.socket basic.target system.slice dbus-broker.service polkit.service",
        "AllowIsolate": "no",
        "AssertResult": "no",
        "AssertTimestampMonotonic": "0",
        "Before": "shutdown.target network-pre.target",
        "BlockIOAccounting": "no",
        "BlockIOWeight": "[not set]",
        "BusName": "org.fedoraproject.FirewallD1",
        "CPUAccounting": "yes",
        "CPUAffinityFromNUMA": "no",
        "CPUQuotaPerSecUSec": "infinity",
        "CPUQuotaPeriodUSec": "infinity",
        "CPUSchedulingPolicy": "0",
        "CPUSchedulingPriority": "0",
        "CPUSchedulingResetOnFork": "no",
        "CPUShares": "[not set]",
        "CPUUsageNSec": "[not set]",
        "CPUWeight": "[not set]",
        "CacheDirectoryMode": "0755",
        "CanFreeze": "yes",
        "CanIsolate": "no",
        "CanReload": "yes",
        "CanStart": "yes",
        "CanStop": "yes",
        "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
        "CleanResult": "success",
        "CollectMode": "inactive",
        "ConditionResult": "no",
        "ConditionTimestampMonotonic": "0",
        "ConfigurationDirectoryMode": "0755",
        "Conflicts": "ebtables.service ip6tables.service ipset.service iptables.service shutdown.target",
        "ControlGroupId": "0",
        "ControlPID": "0",
        "CoredumpFilter": "0x33",
        "DefaultDependencies": "yes",
        "DefaultMemoryLow": "0",
        "DefaultMemoryMin": "0",
        "Delegate": "no",
        "Description": "firewalld - dynamic firewall daemon",
        "DevicePolicy": "auto",
        "Documentation": "\"man:firewalld(1)\"",
        "DynamicUser": "no",
        "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)",
        "ExecMainCode": "0",
        "ExecMainExitTimestampMonotonic": "0",
        "ExecMainPID": "0",
        "ExecMainStartTimestampMonotonic": "0",
        "ExecMainStatus": "0",
        "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExitType": "main",
        "FailureAction": "none",
        "FileDescriptorStoreMax": "0",
        "FinalKillSignal": "9",
        "FragmentPath": "/usr/lib/systemd/system/firewalld.service",
        "FreezerState": "running",
        "GID": "[not set]",
        "GuessMainPID": "yes",
        "IOAccounting": "no",
        "IOReadBytes": "18446744073709551615",
        "IOReadOperations": "18446744073709551615",
        "IOSchedulingClass": "2",
        "IOSchedulingPriority": "4",
        "IOWeight": "[not set]",
        "IOWriteBytes": "18446744073709551615",
        "IOWriteOperations": "18446744073709551615",
        "IPAccounting": "no",
        "IPEgressBytes": "[no data]",
        "IPEgressPackets": "[no data]",
        "IPIngressBytes": "[no data]",
        "IPIngressPackets": "[no data]",
        "Id": "firewalld.service",
        "IgnoreOnIsolate": "no",
        "IgnoreSIGPIPE": "yes",
        "InactiveEnterTimestampMonotonic": "0",
        "InactiveExitTimestampMonotonic": "0",
        "JobRunningTimeoutUSec": "infinity",
        "JobTimeoutAction": "none",
        "JobTimeoutUSec": "infinity",
        "KeyringMode": "private",
        "KillMode": "mixed",
        "KillSignal": "15",
        "LimitAS": "infinity",
        "LimitASSoft": "infinity",
        "LimitCORE": "infinity",
        "LimitCORESoft": "infinity",
        "LimitCPU": "infinity",
        "LimitCPUSoft": "infinity",
        "LimitDATA": "infinity",
        "LimitDATASoft": "infinity",
        "LimitFSIZE": "infinity",
        "LimitFSIZESoft": "infinity",
        "LimitLOCKS": "infinity",
        "LimitLOCKSSoft": "infinity",
        "LimitMEMLOCK": "8388608",
        "LimitMEMLOCKSoft": "8388608",
        "LimitMSGQUEUE": "819200",
        "LimitMSGQUEUESoft": "819200",
        "LimitNICE": "0",
        "LimitNICESoft": "0",
        "LimitNOFILE": "524288",
        "LimitNOFILESoft": "1024",
        "LimitNPROC": "13955",
        "LimitNPROCSoft": "13955",
        "LimitRSS": "infinity",
        "LimitRSSSoft": "infinity",
        "LimitRTPRIO": "0",
        "LimitRTPRIOSoft": "0",
        "LimitRTTIME": "infinity",
        "LimitRTTIMESoft": "infinity",
        "LimitSIGPENDING": "13955",
        "LimitSIGPENDINGSoft": "13955",
        "LimitSTACK": "infinity",
        "LimitSTACKSoft": "8388608",
        "LoadState": "loaded",
        "LockPersonality": "no",
        "LogLevelMax": "-1",
        "LogRateLimitBurst": "0",
        "LogRateLimitIntervalUSec": "0",
        "LogsDirectoryMode": "0755",
        "MainPID": "0",
        "ManagedOOMMemoryPressure": "auto",
        "ManagedOOMMemoryPressureLimit": "0",
        "ManagedOOMPreference": "none",
        "ManagedOOMSwap": "auto",
        "MemoryAccounting": "yes",
        "MemoryAvailable": "infinity",
        "MemoryCurrent": "[not set]",
        "MemoryDenyWriteExecute": "no",
        "MemoryHigh": "infinity",
        "MemoryLimit": "infinity",
        "MemoryLow": "0",
        "MemoryMax": "infinity",
        "MemoryMin": "0",
        "MemorySwapMax": "infinity",
        "MountAPIVFS": "no",
        "NFileDescriptorStore": "0",
        "NRestarts": "0",
        "NUMAPolicy": "n/a",
        "Names": "firewalld.service",
        "NeedDaemonReload": "no",
        "Nice": "0",
        "NoNewPrivileges": "no",
        "NonBlocking": "no",
        "NotifyAccess": "none",
        "OOMPolicy": "stop",
        "OOMScoreAdjust": "0",
        "OnFailureJobMode": "replace",
        "OnSuccessJobMode": "fail",
        "Perpetual": "no",
        "PrivateDevices": "no",
        "PrivateIPC": "no",
        "PrivateMounts": "no",
        "PrivateNetwork": "no",
        "PrivateTmp": "no",
        "PrivateUsers": "no",
        "ProcSubset": "all",
        "ProtectClock": "no",
        "ProtectControlGroups": "no",
        "ProtectHome": "no",
        "ProtectHostname": "no",
        "ProtectKernelLogs": "no",
        "ProtectKernelModules": "no",
        "ProtectKernelTunables": "no",
        "ProtectProc": "default",
        "ProtectSystem": "no",
        "RefuseManualStart": "no",
        "RefuseManualStop": "no",
        "ReloadResult": "success",
        "ReloadSignal": "1",
        "RemainAfterExit": "no",
        "RemoveIPC": "no",
        "Requires": "dbus.socket sysinit.target system.slice",
        "Restart": "no",
        "RestartKillSignal": "15",
        "RestartUSec": "100ms",
        "RestrictNamespaces": "no",
        "RestrictRealtime": "no",
        "RestrictSUIDSGID": "no",
        "Result": "success",
        "RootDirectoryStartOnly": "no",
        "RuntimeDirectoryMode": "0755",
        "RuntimeDirectoryPreserve": "no",
        "RuntimeMaxUSec": "infinity",
        "RuntimeRandomizedExtraUSec": "0",
        "SameProcessGroup": "no",
        "SecureBits": "0",
        "SendSIGHUP": "no",
        "SendSIGKILL": "yes",
        "Slice": "system.slice",
        "StandardError": "null",
        "StandardInput": "null",
        "StandardOutput": "null",
        "StartLimitAction": "none",
        "StartLimitBurst": "5",
        "StartLimitIntervalUSec": "10s",
        "StartupBlockIOWeight": "[not set]",
        "StartupCPUShares": "[not set]",
        "StartupCPUWeight": "[not set]",
        "StartupIOWeight": "[not set]",
        "StateChangeTimestampMonotonic": "0",
        "StateDirectoryMode": "0755",
        "StatusErrno": "0",
        "StopWhenUnneeded": "no",
        "SubState": "dead",
        "SuccessAction": "none",
        "SyslogFacility": "3",
        "SyslogLevel": "6",
        "SyslogLevelPrefix": "yes",
        "SyslogPriority": "30",
        "SystemCallErrorNumber": "2147483646",
        "TTYReset": "no",
        "TTYVHangup": "no",
        "TTYVTDisallocate": "no",
        "TasksAccounting": "yes",
        "TasksCurrent": "[not set]",
        "TasksMax": "22328",
        "TimeoutAbortUSec": "1min 30s",
        "TimeoutCleanUSec": "infinity",
        "TimeoutStartFailureMode": "terminate",
        "TimeoutStartUSec": "1min 30s",
        "TimeoutStopFailureMode": "terminate",
        "TimeoutStopUSec": "1min 30s",
        "TimerSlackNSec": "50000",
        "Transient": "no",
        "Type": "dbus",
        "UID": "[not set]",
        "UMask": "0022",
        "UnitFilePreset": "enabled",
        "UnitFileState": "disabled",
        "UtmpMode": "init",
        "Wants": "network-pre.target",
        "WatchdogSignal": "6",
        "WatchdogTimestampMonotonic": "0",
        "WatchdogUSec": "infinity"
    }
}

TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:34
Saturday 02 November 2024  19:01:15 -0400 (0:00:01.233)       0:07:45.915 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "__firewall_previous_replaced": false,
        "__firewall_python_cmd": "/usr/bin/python3.9",
        "__firewall_report_changed": true
    },
    "changed": false
}

TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:43
Saturday 02 November 2024  19:01:15 -0400 (0:00:00.219)       0:07:46.135 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__firewall_previous_replaced | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:55
Saturday 02 November 2024  19:01:15 -0400 (0:00:00.233)       0:07:46.368 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__firewall_previous_replaced | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Configure firewall] *****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71
Saturday 02 November 2024  19:01:15 -0400 (0:00:00.195)       0:07:46.563 ***** 
changed: [managed-node2] => (item={'port': '7500/tcp', 'zone': 'public', 'state': 'enabled', 'immediate': True, 'permanent': True}) => {
    "__firewall_changed": true,
    "ansible_loop_var": "item",
    "changed": true,
    "item": {
        "immediate": true,
        "permanent": true,
        "port": "7500/tcp",
        "state": "enabled",
        "zone": "public"
    }
}
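
The loop item above implies a role invocation roughly like the following (a sketch assembled from the item's own keys; the real wrapper task lives in tangd-custom-port.yml):

    - name: Open the tang port via the firewall role (sketch of the input implied above)
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.firewall
      vars:
        firewall:
          - port: 7500/tcp
            zone: public
            state: enabled
            immediate: true
            permanent: true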

TASK [fedora.linux_system_roles.firewall : Gather firewall config information] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:120
Saturday 02 November 2024  19:01:16 -0400 (0:00:00.893)       0:07:47.456 ***** 
skipping: [managed-node2] => (item={'port': '7500/tcp', 'zone': 'public', 'state': 'enabled', 'immediate': True, 'permanent': True})  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "'detailed' in fw[0]",
    "item": {
        "immediate": true,
        "permanent": true,
        "port": "7500/tcp",
        "state": "enabled",
        "zone": "public"
    },
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] *******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:130
Saturday 02 November 2024  19:01:16 -0400 (0:00:00.125)       0:07:47.582 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "'detailed' in fw[0]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:139
Saturday 02 November 2024  19:01:16 -0400 (0:00:00.088)       0:07:47.670 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "firewall == None or firewall | length == 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] *******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:144
Saturday 02 November 2024  19:01:16 -0400 (0:00:00.078)       0:07:47.749 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "firewall == None or firewall | length == 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:153
Saturday 02 November 2024  19:01:17 -0400 (0:00:00.156)       0:07:47.905 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__firewall_previous_replaced | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Calculate what has changed] *********
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:163
Saturday 02 November 2024  19:01:17 -0400 (0:00:00.072)       0:07:47.978 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__firewall_previous_replaced | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Show diffs] *************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:169
Saturday 02 November 2024  19:01:17 -0400 (0:00:00.072)       0:07:48.050 ***** 
skipping: [managed-node2] => {
    "false_condition": "__firewall_previous_replaced | bool"
}

TASK [fedora.linux_system_roles.nbde_server : Reload the daemons so the new changes take effect] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:34
Saturday 02 November 2024  19:01:17 -0400 (0:00:00.147)       0:07:48.198 ***** 
ok: [managed-node2] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.nbde_server : Ensure required services are enabled and in the right state] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:39
Saturday 02 November 2024  19:01:18 -0400 (0:00:00.856)       0:07:49.054 ***** 
changed: [managed-node2] => (item=tangd.socket) => {
    "ansible_loop_var": "item",
    "changed": true,
    "enabled": true,
    "item": "tangd.socket",
    "name": "tangd.socket",
    "state": "started",
    "status": {
        "Accept": "yes",
        "AccessSELinuxContext": "system_u:object_r:tangd_unit_file_t:s0",
        "ActiveEnterTimestampMonotonic": "0",
        "ActiveExitTimestampMonotonic": "0",
        "ActiveState": "inactive",
        "After": "system.slice systemd-journald.socket sysinit.target",
        "AllowIsolate": "no",
        "AssertResult": "no",
        "AssertTimestampMonotonic": "0",
        "Backlog": "4096",
        "Before": "sockets.target shutdown.target",
        "BindIPv6Only": "default",
        "BlockIOAccounting": "no",
        "BlockIOWeight": "[not set]",
        "Broadcast": "no",
        "CPUAccounting": "yes",
        "CPUAffinityFromNUMA": "no",
        "CPUQuotaPerSecUSec": "infinity",
        "CPUQuotaPeriodUSec": "infinity",
        "CPUSchedulingPolicy": "0",
        "CPUSchedulingPriority": "0",
        "CPUSchedulingResetOnFork": "no",
        "CPUShares": "[not set]",
        "CPUUsageNSec": "[not set]",
        "CPUWeight": "[not set]",
        "CacheDirectoryMode": "0755",
        "CanFreeze": "no",
        "CanIsolate": "no",
        "CanReload": "no",
        "CanStart": "yes",
        "CanStop": "yes",
        "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
        "CollectMode": "inactive",
        "ConditionResult": "no",
        "ConditionTimestampMonotonic": "0",
        "ConfigurationDirectoryMode": "0755",
        "Conflicts": "shutdown.target",
        "ControlGroupId": "0",
        "ControlPID": "0",
        "CoredumpFilter": "0x33",
        "DefaultDependencies": "yes",
        "DefaultMemoryLow": "0",
        "DefaultMemoryMin": "0",
        "DeferAcceptUSec": "0",
        "Delegate": "no",
        "Description": "Tang Server socket",
        "DevicePolicy": "auto",
        "DirectoryMode": "0755",
        "Documentation": "\"man:tang(8)\"",
        "DropInPaths": "/etc/systemd/system/tangd.socket.d/override.conf",
        "DynamicUser": "no",
        "ExecStartPre": "{ path=/usr/bin/chown ; argv[]=/usr/bin/chown -R tang:tang /var/db/tang ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "FailureAction": "none",
        "FileDescriptorName": "tangd.socket",
        "FinalKillSignal": "9",
        "FlushPending": "no",
        "FragmentPath": "/usr/lib/systemd/system/tangd.socket",
        "FreeBind": "no",
        "FreezerState": "running",
        "GID": "[not set]",
        "IOAccounting": "no",
        "IOReadBytes": "18446744073709551615",
        "IOReadOperations": "18446744073709551615",
        "IOSchedulingClass": "2",
        "IOSchedulingPriority": "4",
        "IOWeight": "[not set]",
        "IOWriteBytes": "18446744073709551615",
        "IOWriteOperations": "18446744073709551615",
        "IPAccounting": "no",
        "IPEgressBytes": "[no data]",
        "IPEgressPackets": "[no data]",
        "IPIngressBytes": "[no data]",
        "IPIngressPackets": "[no data]",
        "IPTOS": "-1",
        "IPTTL": "-1",
        "Id": "tangd.socket",
        "IgnoreOnIsolate": "no",
        "IgnoreSIGPIPE": "yes",
        "InactiveEnterTimestampMonotonic": "0",
        "InactiveExitTimestampMonotonic": "0",
        "JobRunningTimeoutUSec": "infinity",
        "JobTimeoutAction": "none",
        "JobTimeoutUSec": "infinity",
        "KeepAlive": "no",
        "KeepAliveIntervalUSec": "0",
        "KeepAliveProbes": "0",
        "KeepAliveTimeUSec": "0",
        "KeyringMode": "shared",
        "KillMode": "control-group",
        "KillSignal": "15",
        "LimitAS": "infinity",
        "LimitASSoft": "infinity",
        "LimitCORE": "infinity",
        "LimitCORESoft": "infinity",
        "LimitCPU": "infinity",
        "LimitCPUSoft": "infinity",
        "LimitDATA": "infinity",
        "LimitDATASoft": "infinity",
        "LimitFSIZE": "infinity",
        "LimitFSIZESoft": "infinity",
        "LimitLOCKS": "infinity",
        "LimitLOCKSSoft": "infinity",
        "LimitMEMLOCK": "8388608",
        "LimitMEMLOCKSoft": "8388608",
        "LimitMSGQUEUE": "819200",
        "LimitMSGQUEUESoft": "819200",
        "LimitNICE": "0",
        "LimitNICESoft": "0",
        "LimitNOFILE": "524288",
        "LimitNOFILESoft": "1024",
        "LimitNPROC": "13955",
        "LimitNPROCSoft": "13955",
        "LimitRSS": "infinity",
        "LimitRSSSoft": "infinity",
        "LimitRTPRIO": "0",
        "LimitRTPRIOSoft": "0",
        "LimitRTTIME": "infinity",
        "LimitRTTIMESoft": "infinity",
        "LimitSIGPENDING": "13955",
        "LimitSIGPENDINGSoft": "13955",
        "LimitSTACK": "infinity",
        "LimitSTACKSoft": "8388608",
        "Listen": "[::]:7500 (Stream)",
        "LoadState": "loaded",
        "LockPersonality": "no",
        "LogLevelMax": "-1",
        "LogRateLimitBurst": "0",
        "LogRateLimitIntervalUSec": "0",
        "LogsDirectoryMode": "0755",
        "ManagedOOMMemoryPressure": "auto",
        "ManagedOOMMemoryPressureLimit": "0",
        "ManagedOOMPreference": "none",
        "ManagedOOMSwap": "auto",
        "Mark": "-1",
        "MaxConnections": "64",
        "MaxConnectionsPerSource": "0",
        "MemoryAccounting": "yes",
        "MemoryAvailable": "infinity",
        "MemoryCurrent": "[not set]",
        "MemoryDenyWriteExecute": "no",
        "MemoryHigh": "infinity",
        "MemoryLimit": "infinity",
        "MemoryLow": "0",
        "MemoryMax": "infinity",
        "MemoryMin": "0",
        "MemorySwapMax": "infinity",
        "MessageQueueMaxMessages": "0",
        "MessageQueueMessageSize": "0",
        "MountAPIVFS": "no",
        "NAccepted": "0",
        "NConnections": "0",
        "NRefused": "0",
        "NUMAPolicy": "n/a",
        "Names": "tangd.socket",
        "NeedDaemonReload": "no",
        "Nice": "0",
        "NoDelay": "no",
        "NoNewPrivileges": "no",
        "NonBlocking": "no",
        "OOMScoreAdjust": "0",
        "OnFailureJobMode": "replace",
        "OnSuccessJobMode": "fail",
        "PassCredentials": "no",
        "PassPacketInfo": "no",
        "PassSecurity": "no",
        "Perpetual": "no",
        "PipeSize": "0",
        "Priority": "-1",
        "PrivateDevices": "no",
        "PrivateIPC": "no",
        "PrivateMounts": "no",
        "PrivateNetwork": "no",
        "PrivateTmp": "no",
        "PrivateUsers": "no",
        "ProcSubset": "all",
        "ProtectClock": "no",
        "ProtectControlGroups": "no",
        "ProtectHome": "no",
        "ProtectHostname": "no",
        "ProtectKernelLogs": "no",
        "ProtectKernelModules": "no",
        "ProtectKernelTunables": "no",
        "ProtectProc": "default",
        "ProtectSystem": "no",
        "ReceiveBuffer": "0",
        "RefuseManualStart": "no",
        "RefuseManualStop": "no",
        "RemoveIPC": "no",
        "RemoveOnStop": "no",
        "Requires": "sysinit.target system.slice",
        "RestartKillSignal": "15",
        "RestrictNamespaces": "no",
        "RestrictRealtime": "no",
        "RestrictSUIDSGID": "no",
        "Result": "success",
        "ReusePort": "no",
        "RuntimeDirectoryMode": "0755",
        "RuntimeDirectoryPreserve": "no",
        "SameProcessGroup": "no",
        "SecureBits": "0",
        "SendBuffer": "0",
        "SendSIGHUP": "no",
        "SendSIGKILL": "yes",
        "Slice": "system.slice",
        "SocketMode": "0666",
        "SocketProtocol": "0",
        "StandardError": "inherit",
        "StandardInput": "null",
        "StandardOutput": "journal",
        "StartLimitAction": "none",
        "StartLimitBurst": "5",
        "StartLimitIntervalUSec": "10s",
        "StartupBlockIOWeight": "[not set]",
        "StartupCPUShares": "[not set]",
        "StartupCPUWeight": "[not set]",
        "StartupIOWeight": "[not set]",
        "StateChangeTimestampMonotonic": "0",
        "StateDirectoryMode": "0755",
        "StopWhenUnneeded": "no",
        "SubState": "dead",
        "SuccessAction": "none",
        "SyslogFacility": "3",
        "SyslogLevel": "6",
        "SyslogLevelPrefix": "yes",
        "SyslogPriority": "30",
        "SystemCallErrorNumber": "2147483646",
        "TTYReset": "no",
        "TTYVHangup": "no",
        "TTYVTDisallocate": "no",
        "TasksAccounting": "yes",
        "TasksCurrent": "[not set]",
        "TasksMax": "22328",
        "TimeoutCleanUSec": "infinity",
        "TimeoutUSec": "1min 30s",
        "TimerSlackNSec": "50000",
        "Timestamping": "off",
        "Transient": "no",
        "Transparent": "no",
        "TriggerLimitBurst": "200",
        "TriggerLimitIntervalUSec": "2s",
        "UID": "[not set]",
        "UMask": "0022",
        "UnitFilePreset": "disabled",
        "UnitFileState": "disabled",
        "UtmpMode": "init",
        "WatchdogSignal": "6",
        "Writable": "no"
    }
}
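
The unit properties above show tangd.socket loaded and listening on [::]:7500, the same port this test later uses for encryption_tang_url. For context, a common way to put tangd on a non-default port such as 7500 is a systemd socket drop-in; a minimal sketch (the drop-in path and the explicit daemon-reload step are illustrative assumptions, not taken from this run):

    - name: Ensure the tangd.socket drop-in directory exists (illustrative)
      ansible.builtin.file:
        path: /etc/systemd/system/tangd.socket.d
        state: directory
        mode: '0755'

    - name: Make tangd listen on port 7500 (sketch)
      ansible.builtin.copy:
        dest: /etc/systemd/system/tangd.socket.d/override.conf
        content: |
          [Socket]
          ListenStream=
          ListenStream=7500
        mode: '0644'

    - name: Reload systemd so the socket override takes effect
      ansible.builtin.systemd_service:
        daemon_reload: true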

TASK [Create encrypted Stratis pool with Clevis/Tang] **************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:240
Saturday 02 November 2024  19:01:19 -0400 (0:00:01.118)       0:07:50.173 ***** 
included: fedora.linux_system_roles.storage for managed-node2

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Saturday 02 November 2024  19:01:19 -0400 (0:00:00.637)       0:07:50.811 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Saturday 02 November 2024  19:01:20 -0400 (0:00:00.226)       0:07:51.037 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Saturday 02 November 2024  19:01:20 -0400 (0:00:00.153)       0:07:51.191 ***** 
skipping: [managed-node2] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node2] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}
ok: [managed-node2] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Saturday 02 November 2024  19:01:20 -0400 (0:00:00.259)       0:07:51.450 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Saturday 02 November 2024  19:01:20 -0400 (0:00:00.161)       0:07:51.612 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Saturday 02 November 2024  19:01:20 -0400 (0:00:00.134)       0:07:51.746 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Saturday 02 November 2024  19:01:20 -0400 (0:00:00.106)       0:07:51.853 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Saturday 02 November 2024  19:01:21 -0400 (0:00:00.148)       0:07:52.002 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Saturday 02 November 2024  19:01:21 -0400 (0:00:00.339)       0:07:52.342 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Saturday 02 November 2024  19:01:21 -0400 (0:00:00.113)       0:07:52.456 ***** 
ok: [managed-node2] => {
    "storage_pools": [
        {
            "disks": "sda",
            "encryption": true,
            "encryption_clevis_pin": "tang",
            "encryption_password": "yabbadabbadoo",
            "encryption_tang_url": "localhost:7500",
            "name": "foo",
            "type": "stratis"
        }
    ]
}
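
The storage_pools value above is the pool specification the test hands to the role. A minimal standalone playbook requesting an equivalent encrypted Stratis pool bound to a Tang server might look like this (a sketch reusing only parameters visible in this log; the host pattern is illustrative):

    - hosts: managed-node2
      tasks:
        - name: Create an encrypted Stratis pool with Clevis/Tang
          ansible.builtin.include_role:
            name: fedora.linux_system_roles.storage
          vars:
            storage_pools:
              - name: foo
                type: stratis
                disks: sda
                encryption: true
                encryption_password: yabbadabbadoo
                encryption_clevis_pin: tang
                encryption_tang_url: localhost:7500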

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Saturday 02 November 2024  19:01:21 -0400 (0:00:00.165)       0:07:52.621 ***** 
ok: [managed-node2] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Saturday 02 November 2024  19:01:21 -0400 (0:00:00.145)       0:07:52.766 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Saturday 02 November 2024  19:01:22 -0400 (0:00:00.160)       0:07:52.927 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Saturday 02 November 2024  19:01:22 -0400 (0:00:00.169)       0:07:53.096 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Saturday 02 November 2024  19:01:22 -0400 (0:00:00.149)       0:07:53.245 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Saturday 02 November 2024  19:01:22 -0400 (0:00:00.105)       0:07:53.351 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Saturday 02 November 2024  19:01:22 -0400 (0:00:00.156)       0:07:53.508 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Saturday 02 November 2024  19:01:22 -0400 (0:00:00.181)       0:07:53.689 ***** 
changed: [managed-node2] => {
    "actions": [
        {
            "action": "create format",
            "device": "/dev/sda",
            "fs_type": "stratis"
        },
        {
            "action": "create device",
            "device": "/dev/stratis/foo",
            "fs_type": null
        }
    ],
    "changed": true,
    "crypts": [],
    "leaves": [
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1",
        "/dev/stratis/foo"
    ],
    "mounts": [],
    "packages": [
        "stratis-cli",
        "xfsprogs",
        "stratisd"
    ],
    "pools": [
        {
            "disks": [
                "sda"
            ],
            "encryption": true,
            "encryption_cipher": null,
            "encryption_clevis_pin": "tang",
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": "localhost:7500",
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "stratis",
            "volumes": []
        }
    ],
    "volumes": []
}
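
The two recorded actions show blivet first formatting /dev/sda as a Stratis data device and then creating the pool device /dev/stratis/foo; because encryption is enabled, the member device ends up with a LUKS header (it is reported as crypto_LUKS in the blkinfo output further below). A quick follow-up check along these lines would confirm that (a sketch; assumes cryptsetup is installed on the managed node):

    - name: Confirm the pool member carries a LUKS header (sketch)
      ansible.builtin.command: cryptsetup isLuks /dev/sda
      changed_when: false   # read-only probe; the task fails if /dev/sda is not LUKS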

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Saturday 02 November 2024  19:01:39 -0400 (0:00:17.048)       0:08:10.737 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Saturday 02 November 2024  19:01:39 -0400 (0:00:00.071)       0:08:10.808 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730588354.2243395,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "2b06b132c9b1f74ec4dca585656a9f294c78ba1c",
        "ctime": 1730588353.442331,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 415236291,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1730588353.442331,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1366,
        "uid": 0,
        "version": "3857707169",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Saturday 02 November 2024  19:01:40 -0400 (0:00:00.385)       0:08:11.194 ***** 
ok: [managed-node2] => {
    "backup": "",
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Saturday 02 November 2024  19:01:40 -0400 (0:00:00.401)       0:08:11.595 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Saturday 02 November 2024  19:01:40 -0400 (0:00:00.046)       0:08:11.641 ***** 
ok: [managed-node2] => {
    "blivet_output": {
        "actions": [
            {
                "action": "create format",
                "device": "/dev/sda",
                "fs_type": "stratis"
            },
            {
                "action": "create device",
                "device": "/dev/stratis/foo",
                "fs_type": null
            }
        ],
        "changed": true,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1",
            "/dev/stratis/foo"
        ],
        "mounts": [],
        "packages": [
            "stratis-cli",
            "xfsprogs",
            "stratisd"
        ],
        "pools": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": true,
                "encryption_cipher": null,
                "encryption_clevis_pin": "tang",
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": "localhost:7500",
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "stratis",
                "volumes": []
            }
        ],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Saturday 02 November 2024  19:01:40 -0400 (0:00:00.064)       0:08:11.705 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": true,
                "encryption_cipher": null,
                "encryption_clevis_pin": "tang",
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": "localhost:7500",
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "stratis",
                "volumes": []
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Saturday 02 November 2024  19:01:40 -0400 (0:00:00.055)       0:08:11.761 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Saturday 02 November 2024  19:01:40 -0400 (0:00:00.053)       0:08:11.814 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Saturday 02 November 2024  19:01:41 -0400 (0:00:00.165)       0:08:11.980 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "blivet_output['mounts']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Saturday 02 November 2024  19:01:41 -0400 (0:00:00.072)       0:08:12.053 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Saturday 02 November 2024  19:01:41 -0400 (0:00:00.082)       0:08:12.136 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Saturday 02 November 2024  19:01:41 -0400 (0:00:00.070)       0:08:12.206 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "blivet_output['mounts']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Saturday 02 November 2024  19:01:41 -0400 (0:00:00.051)       0:08:12.257 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730587881.7952998,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1730587879.2202723,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 104857860,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1730587879.2212098,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "3381617552",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Saturday 02 November 2024  19:01:41 -0400 (0:00:00.431)       0:08:12.689 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Saturday 02 November 2024  19:01:41 -0400 (0:00:00.051)       0:08:12.740 ***** 
ok: [managed-node2]

TASK [Verify role results] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:253
Saturday 02 November 2024  19:01:42 -0400 (0:00:00.942)       0:08:13.682 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node2

TASK [Print out pool information] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Saturday 02 November 2024  19:01:42 -0400 (0:00:00.213)       0:08:13.896 ***** 
ok: [managed-node2] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda"
            ],
            "encryption": true,
            "encryption_cipher": null,
            "encryption_clevis_pin": "tang",
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": "localhost:7500",
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "stratis",
            "volumes": []
        }
    ]
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Saturday 02 November 2024  19:01:43 -0400 (0:00:00.122)       0:08:14.018 ***** 
skipping: [managed-node2] => {
    "false_condition": "_storage_volumes_list | length > 0"
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Saturday 02 November 2024  19:01:43 -0400 (0:00:00.114)       0:08:14.133 ***** 
ok: [managed-node2] => {
    "changed": false,
    "info": {
        "/dev/mapper/stratis-1-private-23d1fec0c5524722bda4bdea41885714-crypt": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-23d1fec0c5524722bda4bdea41885714-crypt",
            "size": "10G",
            "type": "crypt",
            "uuid": "23d1fec0-c552-4722-bda4-bdea41885714"
        },
        "/dev/mapper/stratis-1-private-d580327e877c45afbdde09632699c1d6-flex-mdv": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-d580327e877c45afbdde09632699c1d6-flex-mdv",
            "size": "512M",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-d580327e877c45afbdde09632699c1d6-flex-thindata": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-d580327e877c45afbdde09632699c1d6-flex-thindata",
            "size": "9.5G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-d580327e877c45afbdde09632699c1d6-flex-thinmeta": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-d580327e877c45afbdde09632699c1d6-flex-thinmeta",
            "size": "6M",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-d580327e877c45afbdde09632699c1d6-physical-originsub": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-d580327e877c45afbdde09632699c1d6-physical-originsub",
            "size": "10G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-d580327e877c45afbdde09632699c1d6-thinpool-pool": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-d580327e877c45afbdde09632699c1d6-thinpool-pool",
            "size": "9.5G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/sda": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": "67c38112-c48b-4f1a-9fa1-88711e3faa15"
        },
        "/dev/sdb": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda1",
            "size": "250G",
            "type": "partition",
            "uuid": "d6f52a16-d77d-4c0d-9841-76b856654401"
        }
    }
}
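
The listing shows the device-mapper stack Stratis builds on top of the encrypted member: the stratis-1-private-...-crypt device over /dev/sda, the flex-mdv, flex-thindata and flex-thinmeta subdevices, the physical-originsub layer, and the thinpool-pool that ultimately backs /dev/stratis/foo. The same stack can be viewed as a tree on the managed node with something like this (a sketch; exact lsblk output varies by version):

    - name: Show the Stratis device stack for sda (sketch)
      ansible.builtin.command: lsblk --ascii /dev/sda
      register: stratis_stack
      changed_when: false

    - name: Print the device tree
      ansible.builtin.debug:
        var: stratis_stack.stdout_lines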

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Saturday 02 November 2024  19:01:43 -0400 (0:00:00.384)       0:08:14.518 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.002973",
    "end": "2024-11-02 19:01:43.907886",
    "rc": 0,
    "start": "2024-11-02 19:01:43.904913"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Wed Oct 30 09:04:39 2024
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=d6f52a16-d77d-4c0d-9841-76b856654401 /                       xfs     defaults        0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Saturday 02 November 2024  19:01:43 -0400 (0:00:00.378)       0:08:14.896 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.002819",
    "end": "2024-11-02 19:01:44.323260",
    "failed_when_result": false,
    "rc": 0,
    "start": "2024-11-02 19:01:44.320441"
}

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Saturday 02 November 2024  19:01:44 -0400 (0:00:00.402)       0:08:15.299 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node2 => (item={'disks': ['sda'], 'encryption': True, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER', 'encryption_clevis_pin': 'tang', 'encryption_tang_url': 'localhost:7500', 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'stratis', 'volumes': []})

TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Saturday 02 November 2024  19:01:44 -0400 (0:00:00.172)       0:08:15.472 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Saturday 02 November 2024  19:01:44 -0400 (0:00:00.114)       0:08:15.586 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Saturday 02 November 2024  19:01:44 -0400 (0:00:00.205)       0:08:15.792 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify pool subset] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34
Saturday 02 November 2024  19:01:44 -0400 (0:00:00.056)       0:08:15.848 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node2 => (item=members)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node2 => (item=volumes)

TASK [Set test variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2
Saturday 02 November 2024  19:01:45 -0400 (0:00:00.107)       0:08:15.956 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Get the canonical device path for each member device] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8
Saturday 02 November 2024  19:01:45 -0400 (0:00:00.052)       0:08:16.009 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Set pvs lvm length] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17
Saturday 02 November 2024  19:01:45 -0400 (0:00:00.046)       0:08:16.055 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set pool pvs] ************************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22
Saturday 02 November 2024  19:01:45 -0400 (0:00:00.077)       0:08:16.132 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify PV count] *********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27
Saturday 02 November 2024  19:01:45 -0400 (0:00:00.083)       0:08:16.216 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36
Saturday 02 November 2024  19:01:45 -0400 (0:00:00.083)       0:08:16.299 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41
Saturday 02 November 2024  19:01:45 -0400 (0:00:00.185)       0:08:16.485 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and not storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46
Saturday 02 November 2024  19:01:45 -0400 (0:00:00.074)       0:08:16.559 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level",
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:51
Saturday 02 November 2024  19:01:45 -0400 (0:00:00.056)       0:08:16.616 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:64
Saturday 02 November 2024  19:01:45 -0400 (0:00:00.043)       0:08:16.659 ***** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0
}

STDOUT:


** (process:211911): WARNING **: 19:01:46.016: failed to load module nvme: libbd_nvme.so.2: cannot open shared object file: No such file or directory
True



STDERR:

OpenSSH_8.7p1, OpenSSL 3.2.2 4 Jun 2024
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.42.118 originally 10.31.42.118
debug2: match not found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: configuration requests final Match pass
debug2: resolve_canonicalize: hostname 10.31.42.118 is address
debug1: re-parsing configuration
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.42.118 originally 10.31.42.118
debug2: match found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: auto-mux: Trying existing master
debug2: fd 3 setting O_NONBLOCK
debug2: mux_client_hello_exchange: master version 4
debug1: mux_client_request_session: master session id: 2
debug2: Received exit status from master 0
Shared connection to 10.31.42.118 closed.


TASK [Verify that PVs fill the whole devices when they should] *****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:73
Saturday 02 November 2024  19:01:46 -0400 (0:00:00.413)       0:08:17.073 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:83
Saturday 02 November 2024  19:01:46 -0400 (0:00:00.063)       0:08:17.136 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node2

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Saturday 02 November 2024  19:01:46 -0400 (0:00:00.095)       0:08:17.232 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Saturday 02 November 2024  19:01:46 -0400 (0:00:00.049)       0:08:17.282 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Saturday 02 November 2024  19:01:46 -0400 (0:00:00.136)       0:08:17.419 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Saturday 02 November 2024  19:01:46 -0400 (0:00:00.090)       0:08:17.509 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Saturday 02 November 2024  19:01:46 -0400 (0:00:00.066)       0:08:17.576 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Saturday 02 November 2024  19:01:46 -0400 (0:00:00.052)       0:08:17.628 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Saturday 02 November 2024  19:01:46 -0400 (0:00:00.058)       0:08:17.687 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55
Saturday 02 November 2024  19:01:46 -0400 (0:00:00.061)       0:08:17.748 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64
Saturday 02 November 2024  19:01:46 -0400 (0:00:00.052)       0:08:17.801 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74
Saturday 02 November 2024  19:01:46 -0400 (0:00:00.064)       0:08:17.865 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83
Saturday 02 November 2024  19:01:47 -0400 (0:00:00.050)       0:08:17.916 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}

TASK [Check LVM RAID] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:86
Saturday 02 November 2024  19:01:47 -0400 (0:00:00.052)       0:08:17.968 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node2

TASK [Validate pool member LVM RAID settings] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Saturday 02 November 2024  19:01:47 -0400 (0:00:00.171)       0:08:18.140 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check Thin Pools] ********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:89
Saturday 02 November 2024  19:01:47 -0400 (0:00:00.044)       0:08:18.185 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node2

TASK [Validate pool member thinpool settings] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2
Saturday 02 November 2024  19:01:47 -0400 (0:00:00.097)       0:08:18.282 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check member encryption] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:92
Saturday 02 November 2024  19:01:47 -0400 (0:00:00.041)       0:08:18.324 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node2

TASK [Set test variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Saturday 02 November 2024  19:01:47 -0400 (0:00:00.100)       0:08:18.424 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "1",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Saturday 02 November 2024  19:01:47 -0400 (0:00:00.078)       0:08:18.502 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Saturday 02 November 2024  19:01:47 -0400 (0:00:00.166)       0:08:18.669 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Saturday 02 November 2024  19:01:47 -0400 (0:00:00.065)       0:08:18.734 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:95
Saturday 02 November 2024  19:01:47 -0400 (0:00:00.056)       0:08:18.791 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node2

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Saturday 02 November 2024  19:01:48 -0400 (0:00:00.143)       0:08:18.934 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:98
Saturday 02 November 2024  19:01:48 -0400 (0:00:00.043)       0:08:18.977 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node2

TASK [Run 'stratis report'] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Saturday 02 November 2024  19:01:48 -0400 (0:00:00.104)       0:08:19.082 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "stratis",
        "report"
    ],
    "delta": "0:00:00.335526",
    "end": "2024-11-02 19:01:48.807701",
    "rc": 0,
    "start": "2024-11-02 19:01:48.472175"
}

STDOUT:

{
    "name_to_pool_uuid_map": {},
    "partially_constructed_pools": [],
    "path_to_ids_map": {},
    "pools": [
        {
            "available_actions": "fully_operational",
            "blockdevs": {
                "cachedevs": [],
                "datadevs": [
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                        "clevis_config": {
                            "thp": "54Hnq0FDPCYnAiIiQNGRWyTCmVcj1rcArGCracNVXH0",
                            "url": "localhost:7500"
                        },
                        "clevis_pin": "tang",
                        "in_use": true,
                        "key_description": "blivet-foo",
                        "path": "/dev/sda",
                        "size": "20938752 sectors",
                        "uuid": "23d1fec0-c552-4722-bda4-bdea41885714"
                    }
                ]
            },
            "filesystems": [],
            "fs_limit": 100,
            "name": "foo",
            "uuid": "d580327e-877c-45af-bdde-09632699c1d6"
        }
    ],
    "stopped_pools": []
}
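
The report shows the binding the test is about to verify: the single data device carries clevis_pin "tang" and a clevis_config url of localhost:7500. A condensed sketch of the kind of assertion the verification tasks below perform (assuming the 'stratis report' output were registered as stratis_report; that variable name is illustrative):

    - name: Assert the Clevis/Tang binding on pool 'foo' (sketch)
      vars:
        _pool: "{{ (stratis_report.stdout | from_json).pools
                   | selectattr('name', 'equalto', 'foo') | first }}"
      ansible.builtin.assert:
        that:
          - _pool.blockdevs.datadevs[0].clevis_pin == 'tang'
          - _pool.blockdevs.datadevs[0].clevis_config.url == 'localhost:7500'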

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:11
Saturday 02 November 2024  19:01:48 -0400 (0:00:00.707)       0:08:19.790 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_stratis_pool_info": {
            "name_to_pool_uuid_map": {},
            "partially_constructed_pools": [],
            "path_to_ids_map": {},
            "pools": [
                {
                    "available_actions": "fully_operational",
                    "blockdevs": {
                        "cachedevs": [],
                        "datadevs": [
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                                "clevis_config": {
                                    "thp": "54Hnq0FDPCYnAiIiQNGRWyTCmVcj1rcArGCracNVXH0",
                                    "url": "localhost:7500"
                                },
                                "clevis_pin": "tang",
                                "in_use": true,
                                "key_description": "blivet-foo",
                                "path": "/dev/sda",
                                "size": "20938752 sectors",
                                "uuid": "23d1fec0-c552-4722-bda4-bdea41885714"
                            }
                        ]
                    },
                    "filesystems": [],
                    "fs_limit": 100,
                    "name": "foo",
                    "uuid": "d580327e-877c-45af-bdde-09632699c1d6"
                }
            ],
            "stopped_pools": []
        }
    },
    "changed": false
}

TASK [Verify that the pool was created] ****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Saturday 02 November 2024  19:01:48 -0400 (0:00:00.075)       0:08:19.865 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:25
Saturday 02 November 2024  19:01:49 -0400 (0:00:00.277)       0:08:20.143 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:34
Saturday 02 November 2024  19:01:49 -0400 (0:00:00.103)       0:08:20.246 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:44
Saturday 02 November 2024  19:01:49 -0400 (0:00:00.083)       0:08:20.329 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:101
Saturday 02 November 2024  19:01:49 -0400 (0:00:00.055)       0:08:20.385 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Saturday 02 November 2024  19:01:49 -0400 (0:00:00.069)       0:08:20.455 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Saturday 02 November 2024  19:01:49 -0400 (0:00:00.053)       0:08:20.508 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Saturday 02 November 2024  19:01:49 -0400 (0:00:00.043)       0:08:20.551 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Clean up] ****************************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:256
Saturday 02 November 2024  19:01:49 -0400 (0:00:00.052)       0:08:20.604 ***** 
included: fedora.linux_system_roles.storage for managed-node2

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Saturday 02 November 2024  19:01:49 -0400 (0:00:00.219)       0:08:20.824 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Saturday 02 November 2024  19:01:50 -0400 (0:00:00.087)       0:08:20.911 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Saturday 02 November 2024  19:01:50 -0400 (0:00:00.095)       0:08:21.007 ***** 
skipping: [managed-node2] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node2] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}
ok: [managed-node2] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}
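
The final blivet_package_list item above is stored as an unrendered Jinja2 expression and only resolves against the target's facts when the list is consumed. The vars-file pattern behind it looks roughly like this (abbreviated sketch of vars/CentOS_9.yml, reduced to the entries shown above):

    blivet_package_list:
      - python3-blivet
      - stratisd
      - stratis-cli
      # Resolved per host: s390x needs a different libblockdev build.
      - "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"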

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Saturday 02 November 2024  19:01:50 -0400 (0:00:00.189)       0:08:21.196 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Saturday 02 November 2024  19:01:50 -0400 (0:00:00.089)       0:08:21.286 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Saturday 02 November 2024  19:01:50 -0400 (0:00:00.091)       0:08:21.378 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Saturday 02 November 2024  19:01:50 -0400 (0:00:00.088)       0:08:21.466 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Saturday 02 November 2024  19:01:50 -0400 (0:00:00.089)       0:08:21.556 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Saturday 02 November 2024  19:01:50 -0400 (0:00:00.280)       0:08:21.837 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Saturday 02 November 2024  19:01:51 -0400 (0:00:00.096)       0:08:21.934 ***** 
ok: [managed-node2] => {
    "storage_pools": [
        {
            "disks": "sda",
            "name": "foo",
            "state": "absent",
            "type": "stratis"
        }
    ]
}
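
This storage_pools spec, with state: absent, is what drives the cleanup pass below. A minimal sketch of the corresponding role invocation (include_role is an assumption; the test playbook may apply the role differently):

    - name: Clean up the Stratis pool
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            type: stratis
            disks: sda    # a single disk may be given as a bare string, as above
            state: absent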

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Saturday 02 November 2024  19:01:51 -0400 (0:00:00.110)       0:08:22.044 ***** 
ok: [managed-node2] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Saturday 02 November 2024  19:01:51 -0400 (0:00:00.095)       0:08:22.140 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Saturday 02 November 2024  19:01:51 -0400 (0:00:00.073)       0:08:22.214 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Saturday 02 November 2024  19:01:51 -0400 (0:00:00.065)       0:08:22.279 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Saturday 02 November 2024  19:01:51 -0400 (0:00:00.062)       0:08:22.341 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Saturday 02 November 2024  19:01:51 -0400 (0:00:00.052)       0:08:22.394 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Saturday 02 November 2024  19:01:51 -0400 (0:00:00.167)       0:08:22.561 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Saturday 02 November 2024  19:01:51 -0400 (0:00:00.044)       0:08:22.606 ***** 
changed: [managed-node2] => {
    "actions": [
        {
            "action": "destroy device",
            "device": "/dev/stratis/foo",
            "fs_type": null
        },
        {
            "action": "destroy format",
            "device": "/dev/sda",
            "fs_type": "stratis"
        }
    ],
    "changed": true,
    "crypts": [],
    "leaves": [
        "/dev/sda",
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1"
    ],
    "mounts": [],
    "packages": [
        "xfsprogs"
    ],
    "pools": [
        {
            "disks": [
                "sda"
            ],
            "encryption": true,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "absent",
            "type": "stratis",
            "volumes": []
        }
    ],
    "volumes": []
}

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Saturday 02 November 2024  19:01:54 -0400 (0:00:02.657)       0:08:25.264 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Saturday 02 November 2024  19:01:54 -0400 (0:00:00.074)       0:08:25.338 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730588354.2243395,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "2b06b132c9b1f74ec4dca585656a9f294c78ba1c",
        "ctime": 1730588353.442331,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 415236291,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1730588353.442331,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1366,
        "uid": 0,
        "version": "3857707169",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Saturday 02 November 2024  19:01:54 -0400 (0:00:00.381)       0:08:25.719 ***** 
ok: [managed-node2] => {
    "backup": "",
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Saturday 02 November 2024  19:01:55 -0400 (0:00:00.390)       0:08:26.110 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Saturday 02 November 2024  19:01:55 -0400 (0:00:00.056)       0:08:26.166 ***** 
ok: [managed-node2] => {
    "blivet_output": {
        "actions": [
            {
                "action": "destroy device",
                "device": "/dev/stratis/foo",
                "fs_type": null
            },
            {
                "action": "destroy format",
                "device": "/dev/sda",
                "fs_type": "stratis"
            }
        ],
        "changed": true,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/sda",
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1"
        ],
        "mounts": [],
        "packages": [
            "xfsprogs"
        ],
        "pools": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": true,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "absent",
                "type": "stratis",
                "volumes": []
            }
        ],
        "volumes": []
    }
}
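
The blivet_output structure shown above is what the verification tasks later inspect. A minimal sketch of an assertion over it (illustrative only; this exact task is not part of the suite):

    - name: Check that the Stratis pool and its on-disk format were destroyed
      ansible.builtin.assert:
        that:
          - blivet_output.changed
          - blivet_output.actions | selectattr('action', 'equalto', 'destroy device') | list | length == 1
          - blivet_output.actions | selectattr('action', 'equalto', 'destroy format') | list | length == 1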

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Saturday 02 November 2024  19:01:55 -0400 (0:00:00.056)       0:08:26.223 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": true,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "absent",
                "type": "stratis",
                "volumes": []
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Saturday 02 November 2024  19:01:55 -0400 (0:00:00.060)       0:08:26.283 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Saturday 02 November 2024  19:01:55 -0400 (0:00:00.093)       0:08:26.377 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Saturday 02 November 2024  19:01:55 -0400 (0:00:00.177)       0:08:26.554 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "blivet_output['mounts']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Saturday 02 November 2024  19:01:55 -0400 (0:00:00.058)       0:08:26.613 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Saturday 02 November 2024  19:01:55 -0400 (0:00:00.066)       0:08:26.680 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Saturday 02 November 2024  19:01:55 -0400 (0:00:00.087)       0:08:26.767 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "blivet_output['mounts']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Saturday 02 November 2024  19:01:55 -0400 (0:00:00.090)       0:08:26.857 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730587881.7952998,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1730587879.2202723,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 104857860,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1730587879.2212098,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "3381617552",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Saturday 02 November 2024  19:01:56 -0400 (0:00:00.416)       0:08:27.274 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Saturday 02 November 2024  19:01:56 -0400 (0:00:00.051)       0:08:27.325 ***** 
ok: [managed-node2]

TASK [Verify role results] *****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:266
Saturday 02 November 2024  19:01:57 -0400 (0:00:00.980)       0:08:28.305 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node2

TASK [Print out pool information] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Saturday 02 November 2024  19:01:57 -0400 (0:00:00.408)       0:08:28.714 ***** 
ok: [managed-node2] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda"
            ],
            "encryption": true,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "absent",
            "type": "stratis",
            "volumes": []
        }
    ]
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Saturday 02 November 2024  19:01:57 -0400 (0:00:00.182)       0:08:28.897 ***** 
skipping: [managed-node2] => {
    "false_condition": "_storage_volumes_list | length > 0"
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Saturday 02 November 2024  19:01:58 -0400 (0:00:00.178)       0:08:29.075 ***** 
ok: [managed-node2] => {
    "changed": false,
    "info": {
        "/dev/sda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdb": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda1",
            "size": "250G",
            "type": "partition",
            "uuid": "d6f52a16-d77d-4c0d-9841-76b856654401"
        }
    }
}

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Saturday 02 November 2024  19:01:58 -0400 (0:00:00.440)       0:08:29.516 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.002927",
    "end": "2024-11-02 19:01:58.929338",
    "rc": 0,
    "start": "2024-11-02 19:01:58.926411"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Wed Oct 30 09:04:39 2024
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=d6f52a16-d77d-4c0d-9841-76b856654401 /                       xfs     defaults        0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
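
The fstab contents above are captured by a plain command task and registered for the later checks. A minimal sketch of the pattern (changed_when: false is an assumption, consistent with the unchanged result reported above):

    - name: Read the /etc/fstab file for volume existence
      ansible.builtin.command: cat /etc/fstab
      register: storage_test_fstab
      changed_when: false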

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Saturday 02 November 2024  19:01:59 -0400 (0:00:00.456)       0:08:29.972 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.003412",
    "end": "2024-11-02 19:01:59.419941",
    "failed_when_result": false,
    "rc": 0,
    "start": "2024-11-02 19:01:59.416529"
}

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Saturday 02 November 2024  19:01:59 -0400 (0:00:00.523)       0:08:30.496 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node2 => (item={'disks': ['sda'], 'encryption': True, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'absent', 'type': 'stratis', 'volumes': []})

TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Saturday 02 November 2024  19:01:59 -0400 (0:00:00.247)       0:08:30.743 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Saturday 02 November 2024  19:02:00 -0400 (0:00:00.210)       0:08:30.953 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Saturday 02 November 2024  19:02:00 -0400 (0:00:00.136)       0:08:31.090 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify pool subset] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34
Saturday 02 November 2024  19:02:00 -0400 (0:00:00.093)       0:08:31.183 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node2 => (item=members)
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node2 => (item=volumes)

TASK [Set test variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2
Saturday 02 November 2024  19:02:00 -0400 (0:00:00.166)       0:08:31.349 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Get the canonical device path for each member device] ********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8
Saturday 02 November 2024  19:02:00 -0400 (0:00:00.102)       0:08:31.452 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Set pvs lvm length] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17
Saturday 02 November 2024  19:02:00 -0400 (0:00:00.071)       0:08:31.524 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set pool pvs] ************************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22
Saturday 02 November 2024  19:02:00 -0400 (0:00:00.061)       0:08:31.585 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify PV count] *********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27
Saturday 02 November 2024  19:02:00 -0400 (0:00:00.054)       0:08:31.640 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36
Saturday 02 November 2024  19:02:00 -0400 (0:00:00.050)       0:08:31.690 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41
Saturday 02 November 2024  19:02:00 -0400 (0:00:00.146)       0:08:31.836 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and not storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46
Saturday 02 November 2024  19:02:01 -0400 (0:00:00.091)       0:08:31.928 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level",
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:51
Saturday 02 November 2024  19:02:01 -0400 (0:00:00.087)       0:08:32.015 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:64
Saturday 02 November 2024  19:02:01 -0400 (0:00:00.070)       0:08:32.087 ***** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0
}

STDOUT:


** (process:213106): WARNING **: 19:02:01.447: failed to load module nvme: libbd_nvme.so.2: cannot open shared object file: No such file or directory
True



STDERR:

OpenSSH_8.7p1, OpenSSL 3.2.2 4 Jun 2024
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.42.118 originally 10.31.42.118
debug2: match not found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: configuration requests final Match pass
debug2: resolve_canonicalize: hostname 10.31.42.118 is address
debug1: re-parsing configuration
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.42.118 originally 10.31.42.118
debug2: match found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: auto-mux: Trying existing master
debug2: fd 3 setting O_NONBLOCK
debug2: mux_client_hello_exchange: master version 4
debug1: mux_client_request_session: master session id: 2
debug2: Received exit status from master 0
Shared connection to 10.31.42.118 closed.


TASK [Verify that PVs fill the whole devices when they should] *****************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:73
Saturday 02 November 2024  19:02:01 -0400 (0:00:00.416)       0:08:32.503 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:83
Saturday 02 November 2024  19:02:01 -0400 (0:00:00.075)       0:08:32.579 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node2

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Saturday 02 November 2024  19:02:01 -0400 (0:00:00.161)       0:08:32.741 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Saturday 02 November 2024  19:02:01 -0400 (0:00:00.084)       0:08:32.825 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Saturday 02 November 2024  19:02:02 -0400 (0:00:00.195)       0:08:33.021 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Saturday 02 November 2024  19:02:02 -0400 (0:00:00.087)       0:08:33.108 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Saturday 02 November 2024  19:02:02 -0400 (0:00:00.093)       0:08:33.201 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Saturday 02 November 2024  19:02:02 -0400 (0:00:00.092)       0:08:33.294 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Saturday 02 November 2024  19:02:02 -0400 (0:00:00.162)       0:08:33.456 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55
Saturday 02 November 2024  19:02:02 -0400 (0:00:00.088)       0:08:33.545 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64
Saturday 02 November 2024  19:02:02 -0400 (0:00:00.108)       0:08:33.654 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74
Saturday 02 November 2024  19:02:02 -0400 (0:00:00.117)       0:08:33.772 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83
Saturday 02 November 2024  19:02:03 -0400 (0:00:00.148)       0:08:33.921 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}

TASK [Check LVM RAID] **********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:86
Saturday 02 November 2024  19:02:03 -0400 (0:00:00.148)       0:08:34.070 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node2

TASK [Validate pool member LVM RAID settings] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Saturday 02 November 2024  19:02:03 -0400 (0:00:00.514)       0:08:34.584 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check Thin Pools] ********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:89
Saturday 02 November 2024  19:02:03 -0400 (0:00:00.132)       0:08:34.717 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node2

TASK [Validate pool member thinpool settings] **********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2
Saturday 02 November 2024  19:02:04 -0400 (0:00:00.318)       0:08:35.036 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check member encryption] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:92
Saturday 02 November 2024  19:02:04 -0400 (0:00:00.123)       0:08:35.159 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node2

TASK [Set test variables] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Saturday 02 November 2024  19:02:04 -0400 (0:00:00.342)       0:08:35.502 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Saturday 02 November 2024  19:02:04 -0400 (0:00:00.196)       0:08:35.698 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Saturday 02 November 2024  19:02:05 -0400 (0:00:00.266)       0:08:35.964 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Saturday 02 November 2024  19:02:05 -0400 (0:00:00.095)       0:08:36.060 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:95
Saturday 02 November 2024  19:02:05 -0400 (0:00:00.184)       0:08:36.245 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node2

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Saturday 02 November 2024  19:02:05 -0400 (0:00:00.256)       0:08:36.501 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:98
Saturday 02 November 2024  19:02:05 -0400 (0:00:00.167)       0:08:36.669 ***** 
included: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node2

TASK [Run 'stratis report'] ****************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Saturday 02 November 2024  19:02:06 -0400 (0:00:00.323)       0:08:36.992 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "stratis",
        "report"
    ],
    "delta": "0:00:00.336440",
    "end": "2024-11-02 19:02:06.837041",
    "rc": 0,
    "start": "2024-11-02 19:02:06.500601"
}

STDOUT:

{
    "name_to_pool_uuid_map": {},
    "partially_constructed_pools": [],
    "path_to_ids_map": {},
    "pools": [],
    "stopped_pools": []
}
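
With the pool removed, the report comes back empty. A check along these lines would confirm the absence (a sketch only; the suite's own assertions below are skipped because they run only when the pool state is 'present'):

    - name: Verify that pool 'foo' is absent after cleanup
      ansible.builtin.assert:
        that:
          - _stratis_pool_info.pools | selectattr('name', 'equalto', 'foo') | list | length == 0
      when: storage_test_pool.state == 'absent'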

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:11
Saturday 02 November 2024  19:02:06 -0400 (0:00:00.863)       0:08:37.856 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_stratis_pool_info": {
            "name_to_pool_uuid_map": {},
            "partially_constructed_pools": [],
            "path_to_ids_map": {},
            "pools": [],
            "stopped_pools": []
        }
    },
    "changed": false
}

TASK [Verify that the pools were created] **************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Saturday 02 November 2024  19:02:07 -0400 (0:00:00.315)       0:08:38.172 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:25
Saturday 02 November 2024  19:02:07 -0400 (0:00:00.133)       0:08:38.305 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:34
Saturday 02 November 2024  19:02:07 -0400 (0:00:00.104)       0:08:38.410 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:44
Saturday 02 November 2024  19:02:07 -0400 (0:00:00.097)       0:08:38.507 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:101
Saturday 02 November 2024  19:02:07 -0400 (0:00:00.106)       0:08:38.614 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Saturday 02 November 2024  19:02:07 -0400 (0:00:00.127)       0:08:38.742 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Saturday 02 November 2024  19:02:07 -0400 (0:00:00.073)       0:08:38.816 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Saturday 02 November 2024  19:02:07 -0400 (0:00:00.075)       0:08:38.891 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

PLAY RECAP *********************************************************************
managed-node2              : ok=913  changed=25   unreachable=0    failed=0    skipped=1271 rescued=0    ignored=0   

Saturday 02 November 2024  19:02:08 -0400 (0:00:00.374)       0:08:39.266 ***** 
=============================================================================== 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state - 100.28s
/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state -- 17.05s
/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state -- 12.86s
/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state -- 11.22s
/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state -- 10.47s
/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 9.49s
/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 4.08s
/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.selinux : Get SELinux modules facts ----------- 3.75s
/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:112 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 3.75s
/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 3.07s
/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 2.66s
/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.nbde_server : Ensure tang is installed -------- 2.64s
/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:2 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 2.61s
/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 2.53s
/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.selinux : Set an SELinux label on a port ------ 2.40s
/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:87 
fedora.linux_system_roles.storage : Get service facts ------------------- 2.07s
/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51 
fedora.linux_system_roles.selinux : Install SELinux tool semanage ------- 2.02s
/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:58 
Read the /etc/crypttab file --------------------------------------------- 1.74s
/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25 
Find unused disks in the system ----------------------------------------- 1.62s
/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:11 
fedora.linux_system_roles.storage : Make sure blivet is available ------- 1.54s
/tmp/collections-SZh/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2