ansible-playbook [core 2.17.5]
  config file = None
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/local/lib/python3.12/site-packages/ansible
  ansible collection location = /tmp/collections-S9Z
  executable location = /usr/local/bin/ansible-playbook
  python version = 3.12.6 (main, Sep  9 2024, 00:00:00) [GCC 14.2.1 20240801 (Red Hat 14.2.1-1)] (/usr/bin/python3.12)
  jinja version = 3.1.4
  libyaml = True
No config file found; using defaults
running playbook inside collection fedora.linux_system_roles
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.profile_tasks to ansible.posix.profile_tasks
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: tests_change_fs.yml **************************************************
1 plays in /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_fs.yml

PLAY [Test change fs] **********************************************************

TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_fs.yml:2
Saturday 02 November 2024  19:25:13 -0400 (0:00:00.011)       0:00:00.011 ***** 
[WARNING]: Platform linux on host managed-node2 is using the discovered Python
interpreter at /usr/bin/python3.12, but future installation of another Python
interpreter could change the meaning of that path. See
https://docs.ansible.com/ansible-core/2.17/reference_appendices/interpreter_discovery.html
for more information.
ok: [managed-node2]

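Editor's note: the interpreter-discovery warning above is harmless in this run, but it can be silenced by pinning the interpreter explicitly. A minimal sketch as an inventory host variable (the hostname and path are taken from this log, not from role defaults):

    # inventory.yml -- pin the discovered interpreter so a future Python install
    # cannot silently change what /usr/bin/python3 resolves to
    all:
      hosts:
        managed-node2:
          ansible_python_interpreter: /usr/bin/python3.12
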
TASK [Run the role] ************************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_fs.yml:14
Saturday 02 November 2024  19:25:15 -0400 (0:00:01.434)       0:00:01.446 ***** 
included: fedora.linux_system_roles.storage for managed-node2

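Editor's note: the test playbook drives the role through an include. The actual variables set by tests_change_fs.yml are not visible in this log; a hypothetical invocation that would exercise a filesystem change looks like the sketch below (pool and volume values are illustrative only):

    # sketch only -- names, disks, and sizes are placeholders, not the test's real data
    - name: Run the role
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            disks: ["sda"]
            volumes:
              - name: test1
                size: 5g
                fs_type: ext4
                mount_point: /opt/test1
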
TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Saturday 02 November 2024  19:25:15 -0400 (0:00:00.027)       0:00:01.474 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Saturday 02 November 2024  19:25:15 -0400 (0:00:00.025)       0:00:01.500 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

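Editor's note: the skip above comes from a guarded fact-gathering idiom, where setup runs only if a required fact key is missing from what has already been gathered. A minimal sketch of the pattern (the fact list is illustrative, not the role's actual __storage_required_facts):

    - name: Ensure ansible_facts used by role
      ansible.builtin.setup:
        gather_subset: min
      vars:
        __required_facts: ["distribution", "distribution_major_version"]
      # run only when at least one required fact has not been gathered yet
      when: __required_facts | difference(ansible_facts.keys() | list) | length > 0
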
TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Saturday 02 November 2024  19:25:15 -0400 (0:00:00.046)       0:00:01.547 ***** 
skipping: [managed-node2] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node2] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}

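Editor's note: the per-item skipping/ok results above come from looping include_vars over increasingly specific platform files and loading only the ones that exist, which matches the logged condition "__vars_file is file". A minimal sketch of the idiom (variable names are illustrative):

    - name: Set platform/version specific variables
      ansible.builtin.include_vars: "{{ __vars_file }}"
      loop:
        - "{{ ansible_facts['os_family'] }}.yml"
        - "{{ ansible_facts['distribution'] }}.yml"
        - "{{ ansible_facts['distribution'] }}_{{ ansible_facts['distribution_major_version'] }}.yml"
      vars:
        __vars_file: "{{ role_path }}/vars/{{ item }}"
      # skip candidates that do not exist on the controller
      when: __vars_file is file
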
TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Saturday 02 November 2024  19:25:15 -0400 (0:00:00.044)       0:00:01.591 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Saturday 02 November 2024  19:25:15 -0400 (0:00:00.459)       0:00:02.050 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "__storage_is_ostree": false
    },
    "changed": false
}

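Editor's note: the two tasks above implement a stat-then-set_fact probe: check once for the ostree marker, then cache the boolean for later conditionals. A minimal sketch (the marker path is an assumption; the log does not print it):

    - name: Check if system is ostree
      ansible.builtin.stat:
        path: /run/ostree-booted   # assumed marker path, not shown in the log
      register: __ostree_stat

    - name: Set flag to indicate system is ostree
      ansible.builtin.set_fact:
        __storage_is_ostree: "{{ __ostree_stat.stat.exists }}"
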
TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Saturday 02 November 2024  19:25:15 -0400 (0:00:00.024)       0:00:02.075 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Saturday 02 November 2024  19:25:15 -0400 (0:00:00.016)       0:00:02.092 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Saturday 02 November 2024  19:25:15 -0400 (0:00:00.015)       0:00:02.107 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node2

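Editor's note: the role dispatches to a provider-specific task file, and this run selects the blivet backend. A sketch of the dispatch idiom (storage_provider is the role's documented provider variable; the exact expression here is illustrative):

    - name: Include the appropriate provider tasks
      ansible.builtin.include_tasks: "main-{{ storage_provider }}.yml"
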
TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Saturday 02 November 2024  19:25:15 -0400 (0:00:00.045)       0:00:02.153 ***** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: libblockdev libblockdev-crypto libblockdev-dm libblockdev-fs libblockdev-lvm libblockdev-mdraid libblockdev-swap python3-blivet stratis-cli stratisd xfsprogs

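Editor's note: "Nothing to do" with rc 0 means the package task found every blivet dependency already installed; the lsrpackages line lists what was checked. A minimal sketch of the ensuring task, reusing the blivet_package_list fact loaded from CentOS_10.yml above:

    - name: Make sure blivet is available
      ansible.builtin.package:
        name: "{{ blivet_package_list }}"
        state: present
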
TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Saturday 02 November 2024  19:25:16 -0400 (0:00:00.850)       0:00:03.004 ***** 
ok: [managed-node2] => {
    "storage_pools": "VARIABLE IS NOT DEFINED!: 'storage_pools' is undefined"
}

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Saturday 02 November 2024  19:25:16 -0400 (0:00:00.030)       0:00:03.034 ***** 
ok: [managed-node2] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

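Editor's note: "VARIABLE IS NOT DEFINED!" is expected here. The test sets neither storage_pools nor storage_volumes before this first role run, and debug prints the undefined marker instead of failing. A quieter alternative substitutes a default (purely illustrative; the role intentionally shows the raw state):

    - name: Show storage_pools
      ansible.builtin.debug:
        msg: "{{ storage_pools | d([]) }}"   # d() yields an empty list when undefined
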
TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Saturday 02 November 2024  19:25:16 -0400 (0:00:00.029)       0:00:03.064 ***** 
ok: [managed-node2] => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [],
    "mounts": [],
    "packages": [],
    "pools": [],
    "volumes": []
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Saturday 02 November 2024  19:25:17 -0400 (0:00:00.805)       0:00:03.869 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Check if the COPR support packages should be installed] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:2
Saturday 02 November 2024  19:25:17 -0400 (0:00:00.034)       0:00:03.904 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Make sure COPR support packages are present] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:13
Saturday 02 November 2024  19:25:17 -0400 (0:00:00.031)       0:00:03.935 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "install_copr | d(false) | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable COPRs] ************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:19
Saturday 02 November 2024  19:25:17 -0400 (0:00:00.031)       0:00:03.966 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

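Editor's note: all three COPR tasks short-circuit. The two loop-driven tasks skip because their computed item list is empty, and the package task skips on install_copr | d(false) | bool, which safely treats an unset variable as false. The guard idiom in isolation:

    - name: Make sure COPR support packages are present
      ansible.builtin.package:
        name: dnf-plugins-core   # assumed package; the log does not show the real list
        state: present
      # skipped unless the caller opts in explicitly
      when: install_copr | d(false) | bool
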
TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Saturday 02 November 2024  19:25:17 -0400 (0:00:00.030)       0:00:03.997 ***** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: kpartx

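Editor's note: the next task snapshots systemd unit state with the service_facts module, populating ansible_facts.services with the name/source/state/status records dumped below. A minimal sketch of gathering and then querying those facts:

    - name: Get service facts
      ansible.builtin.service_facts:

    - name: Example lookup against the gathered facts
      ansible.builtin.debug:
        msg: "sshd is {{ ansible_facts.services['sshd.service'].state }}"
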
TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Saturday 02 November 2024  19:25:18 -0400 (0:00:00.920)       0:00:04.918 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "services": {
            "NetworkManager-dispatcher.service": {
                "name": "NetworkManager-dispatcher.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "NetworkManager-wait-online.service": {
                "name": "NetworkManager-wait-online.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "NetworkManager.service": {
                "name": "NetworkManager.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "audit-rules.service": {
                "name": "audit-rules.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "auditd.service": {
                "name": "auditd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "auth-rpcgss-module.service": {
                "name": "auth-rpcgss-module.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "autofs.service": {
                "name": "autofs.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "autovt@.service": {
                "name": "autovt@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "alias"
            },
            "blivet.service": {
                "name": "blivet.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "blk-availability.service": {
                "name": "blk-availability.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "capsule@.service": {
                "name": "capsule@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "chrony-wait.service": {
                "name": "chrony-wait.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "chronyd-restricted.service": {
                "name": "chronyd-restricted.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "chronyd.service": {
                "name": "chronyd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "cloud-config.service": {
                "name": "cloud-config.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-final.service": {
                "name": "cloud-final.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-init-hotplugd.service": {
                "name": "cloud-init-hotplugd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "cloud-init-local.service": {
                "name": "cloud-init-local.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-init.service": {
                "name": "cloud-init.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "console-getty.service": {
                "name": "console-getty.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "container-getty@.service": {
                "name": "container-getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "crond.service": {
                "name": "crond.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "dbus-broker.service": {
                "name": "dbus-broker.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "dbus-org.freedesktop.hostname1.service": {
                "name": "dbus-org.freedesktop.hostname1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus-org.freedesktop.locale1.service": {
                "name": "dbus-org.freedesktop.locale1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus-org.freedesktop.login1.service": {
                "name": "dbus-org.freedesktop.login1.service",
                "source": "systemd",
                "state": "active",
                "status": "alias"
            },
            "dbus-org.freedesktop.nm-dispatcher.service": {
                "name": "dbus-org.freedesktop.nm-dispatcher.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus-org.freedesktop.timedate1.service": {
                "name": "dbus-org.freedesktop.timedate1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus.service": {
                "name": "dbus.service",
                "source": "systemd",
                "state": "active",
                "status": "alias"
            },
            "debug-shell.service": {
                "name": "debug-shell.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "dhcpcd.service": {
                "name": "dhcpcd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "dhcpcd@.service": {
                "name": "dhcpcd@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "disabled"
            },
            "display-manager.service": {
                "name": "display-manager.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "dm-event.service": {
                "name": "dm-event.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "dnf-makecache.service": {
                "name": "dnf-makecache.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dnf-system-upgrade-cleanup.service": {
                "name": "dnf-system-upgrade-cleanup.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "dnf-system-upgrade.service": {
                "name": "dnf-system-upgrade.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "dracut-cmdline.service": {
                "name": "dracut-cmdline.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-initqueue.service": {
                "name": "dracut-initqueue.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-mount.service": {
                "name": "dracut-mount.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-mount.service": {
                "name": "dracut-pre-mount.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-pivot.service": {
                "name": "dracut-pre-pivot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-trigger.service": {
                "name": "dracut-pre-trigger.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-udev.service": {
                "name": "dracut-pre-udev.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-shutdown-onfailure.service": {
                "name": "dracut-shutdown-onfailure.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-shutdown.service": {
                "name": "dracut-shutdown.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "emergency.service": {
                "name": "emergency.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "fcoe.service": {
                "name": "fcoe.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "fips-crypto-policy-overlay.service": {
                "name": "fips-crypto-policy-overlay.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "firewalld.service": {
                "name": "firewalld.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "fsidd.service": {
                "name": "fsidd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "fstrim.service": {
                "name": "fstrim.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "getty@.service": {
                "name": "getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "enabled"
            },
            "getty@tty1.service": {
                "name": "getty@tty1.service",
                "source": "systemd",
                "state": "running",
                "status": "active"
            },
            "grub-boot-indeterminate.service": {
                "name": "grub-boot-indeterminate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "grub2-systemd-integration.service": {
                "name": "grub2-systemd-integration.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "gssproxy.service": {
                "name": "gssproxy.service",
                "source": "systemd",
                "state": "running",
                "status": "disabled"
            },
            "hv_kvp_daemon.service": {
                "name": "hv_kvp_daemon.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "initrd-cleanup.service": {
                "name": "initrd-cleanup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-parse-etc.service": {
                "name": "initrd-parse-etc.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-switch-root.service": {
                "name": "initrd-switch-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-udevadm-cleanup-db.service": {
                "name": "initrd-udevadm-cleanup-db.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "irqbalance.service": {
                "name": "irqbalance.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "iscsi-shutdown.service": {
                "name": "iscsi-shutdown.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "iscsi.service": {
                "name": "iscsi.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "iscsid.service": {
                "name": "iscsid.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "kdump.service": {
                "name": "kdump.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "kmod-static-nodes.service": {
                "name": "kmod-static-nodes.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "kvm_stat.service": {
                "name": "kvm_stat.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "ldconfig.service": {
                "name": "ldconfig.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "logrotate.service": {
                "name": "logrotate.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "lvm-devices-import.service": {
                "name": "lvm-devices-import.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "lvm2-activation-early.service": {
                "name": "lvm2-activation-early.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "lvm2-lvmpolld.service": {
                "name": "lvm2-lvmpolld.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "lvm2-monitor.service": {
                "name": "lvm2-monitor.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "man-db-cache-update.service": {
                "name": "man-db-cache-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "man-db-restart-cache-update.service": {
                "name": "man-db-restart-cache-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "mdadm-grow-continue@.service": {
                "name": "mdadm-grow-continue@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "mdadm-last-resort@.service": {
                "name": "mdadm-last-resort@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "mdcheck_continue.service": {
                "name": "mdcheck_continue.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "mdcheck_start.service": {
                "name": "mdcheck_start.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "mdmon@.service": {
                "name": "mdmon@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "mdmonitor-oneshot.service": {
                "name": "mdmonitor-oneshot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "mdmonitor.service": {
                "name": "mdmonitor.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "microcode.service": {
                "name": "microcode.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "modprobe@.service": {
                "name": "modprobe@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "modprobe@configfs.service": {
                "name": "modprobe@configfs.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@dm_mod.service": {
                "name": "modprobe@dm_mod.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@dm_multipath.service": {
                "name": "modprobe@dm_multipath.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@drm.service": {
                "name": "modprobe@drm.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@efi_pstore.service": {
                "name": "modprobe@efi_pstore.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@fuse.service": {
                "name": "modprobe@fuse.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@loop.service": {
                "name": "modprobe@loop.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "multipathd.service": {
                "name": "multipathd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "network.service": {
                "name": "network.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "nfs-blkmap.service": {
                "name": "nfs-blkmap.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nfs-idmapd.service": {
                "name": "nfs-idmapd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfs-mountd.service": {
                "name": "nfs-mountd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfs-server.service": {
                "name": "nfs-server.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "nfs-utils.service": {
                "name": "nfs-utils.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfsdcld.service": {
                "name": "nfsdcld.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nftables.service": {
                "name": "nftables.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nis-domainname.service": {
                "name": "nis-domainname.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nm-priv-helper.service": {
                "name": "nm-priv-helper.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "ntpd.service": {
                "name": "ntpd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "ntpdate.service": {
                "name": "ntpdate.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "pam_namespace.service": {
                "name": "pam_namespace.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "pcscd.service": {
                "name": "pcscd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "indirect"
            },
            "plymouth-quit-wait.service": {
                "name": "plymouth-quit-wait.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "plymouth-start.service": {
                "name": "plymouth-start.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "polkit.service": {
                "name": "polkit.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "qemu-guest-agent.service": {
                "name": "qemu-guest-agent.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "quotaon-root.service": {
                "name": "quotaon-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "quotaon@.service": {
                "name": "quotaon@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "raid-check.service": {
                "name": "raid-check.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rbdmap.service": {
                "name": "rbdmap.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "rc-local.service": {
                "name": "rc-local.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rescue.service": {
                "name": "rescue.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "restraintd.service": {
                "name": "restraintd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rngd.service": {
                "name": "rngd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rpc-gssd.service": {
                "name": "rpc-gssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-statd-notify.service": {
                "name": "rpc-statd-notify.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-statd.service": {
                "name": "rpc-statd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-svcgssd.service": {
                "name": "rpc-svcgssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "rpcbind.service": {
                "name": "rpcbind.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rpmdb-migrate.service": {
                "name": "rpmdb-migrate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "rpmdb-rebuild.service": {
                "name": "rpmdb-rebuild.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "rsyslog.service": {
                "name": "rsyslog.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "selinux-autorelabel-mark.service": {
                "name": "selinux-autorelabel-mark.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "selinux-autorelabel.service": {
                "name": "selinux-autorelabel.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "selinux-check-proper-disable.service": {
                "name": "selinux-check-proper-disable.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "serial-getty@.service": {
                "name": "serial-getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "indirect"
            },
            "serial-getty@ttyS0.service": {
                "name": "serial-getty@ttyS0.service",
                "source": "systemd",
                "state": "running",
                "status": "active"
            },
            "sntp.service": {
                "name": "sntp.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "ssh-host-keys-migration.service": {
                "name": "ssh-host-keys-migration.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "sshd-keygen.service": {
                "name": "sshd-keygen.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "sshd-keygen@.service": {
                "name": "sshd-keygen@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "disabled"
            },
            "sshd-keygen@ecdsa.service": {
                "name": "sshd-keygen@ecdsa.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "sshd-keygen@ed25519.service": {
                "name": "sshd-keygen@ed25519.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "sshd-keygen@rsa.service": {
                "name": "sshd-keygen@rsa.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "sshd.service": {
                "name": "sshd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "sshd@.service": {
                "name": "sshd@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "sssd-autofs.service": {
                "name": "sssd-autofs.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-kcm.service": {
                "name": "sssd-kcm.service",
                "source": "systemd",
                "state": "stopped",
                "status": "indirect"
            },
            "sssd-nss.service": {
                "name": "sssd-nss.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-pac.service": {
                "name": "sssd-pac.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-pam.service": {
                "name": "sssd-pam.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-ssh.service": {
                "name": "sssd-ssh.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-sudo.service": {
                "name": "sssd-sudo.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd.service": {
                "name": "sssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "stratis-fstab-setup@.service": {
                "name": "stratis-fstab-setup@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "stratisd-min-postinitrd.service": {
                "name": "stratisd-min-postinitrd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "stratisd.service": {
                "name": "stratisd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "syslog.service": {
                "name": "syslog.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "system-update-cleanup.service": {
                "name": "system-update-cleanup.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-ask-password-console.service": {
                "name": "systemd-ask-password-console.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-ask-password-wall.service": {
                "name": "systemd-ask-password-wall.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-backlight@.service": {
                "name": "systemd-backlight@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-battery-check.service": {
                "name": "systemd-battery-check.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-binfmt.service": {
                "name": "systemd-binfmt.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-bless-boot.service": {
                "name": "systemd-bless-boot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-boot-check-no-failures.service": {
                "name": "systemd-boot-check-no-failures.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-boot-random-seed.service": {
                "name": "systemd-boot-random-seed.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-boot-update.service": {
                "name": "systemd-boot-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-bootctl@.service": {
                "name": "systemd-bootctl@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-confext.service": {
                "name": "systemd-confext.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-coredump@.service": {
                "name": "systemd-coredump@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-creds@.service": {
                "name": "systemd-creds@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-exit.service": {
                "name": "systemd-exit.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-firstboot.service": {
                "name": "systemd-firstboot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-fsck-root.service": {
                "name": "systemd-fsck-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-fsck@.service": {
                "name": "systemd-fsck@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-growfs-root.service": {
                "name": "systemd-growfs-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-growfs@.service": {
                "name": "systemd-growfs@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-halt.service": {
                "name": "systemd-halt.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-hibernate-clear.service": {
                "name": "systemd-hibernate-clear.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hibernate-resume.service": {
                "name": "systemd-hibernate-resume.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hibernate.service": {
                "name": "systemd-hibernate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-hostnamed.service": {
                "name": "systemd-hostnamed.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hwdb-update.service": {
                "name": "systemd-hwdb-update.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hybrid-sleep.service": {
                "name": "systemd-hybrid-sleep.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-initctl.service": {
                "name": "systemd-initctl.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journal-catalog-update.service": {
                "name": "systemd-journal-catalog-update.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journal-flush.service": {
                "name": "systemd-journal-flush.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journald-sync@.service": {
                "name": "systemd-journald-sync@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-journald.service": {
                "name": "systemd-journald.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-journald@.service": {
                "name": "systemd-journald@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-kexec.service": {
                "name": "systemd-kexec.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-localed.service": {
                "name": "systemd-localed.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-logind.service": {
                "name": "systemd-logind.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-machine-id-commit.service": {
                "name": "systemd-machine-id-commit.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-modules-load.service": {
                "name": "systemd-modules-load.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-network-generator.service": {
                "name": "systemd-network-generator.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-networkd-wait-online.service": {
                "name": "systemd-networkd-wait-online.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "systemd-oomd.service": {
                "name": "systemd-oomd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "systemd-pcrextend@.service": {
                "name": "systemd-pcrextend@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-pcrfs-root.service": {
                "name": "systemd-pcrfs-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-pcrfs@.service": {
                "name": "systemd-pcrfs@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-pcrlock-file-system.service": {
                "name": "systemd-pcrlock-file-system.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-firmware-code.service": {
                "name": "systemd-pcrlock-firmware-code.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-firmware-config.service": {
                "name": "systemd-pcrlock-firmware-config.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-machine-id.service": {
                "name": "systemd-pcrlock-machine-id.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-make-policy.service": {
                "name": "systemd-pcrlock-make-policy.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-secureboot-authority.service": {
                "name": "systemd-pcrlock-secureboot-authority.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-secureboot-policy.service": {
                "name": "systemd-pcrlock-secureboot-policy.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock@.service": {
                "name": "systemd-pcrlock@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-pcrmachine.service": {
                "name": "systemd-pcrmachine.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-pcrphase-initrd.service": {
                "name": "systemd-pcrphase-initrd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-pcrphase-sysinit.service": {
                "name": "systemd-pcrphase-sysinit.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-pcrphase.service": {
                "name": "systemd-pcrphase.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-poweroff.service": {
                "name": "systemd-poweroff.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-pstore.service": {
                "name": "systemd-pstore.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-quotacheck-root.service": {
                "name": "systemd-quotacheck-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-quotacheck@.service": {
                "name": "systemd-quotacheck@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-random-seed.service": {
                "name": "systemd-random-seed.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-reboot.service": {
                "name": "systemd-reboot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-remount-fs.service": {
                "name": "systemd-remount-fs.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled-runtime"
            },
            "systemd-repart.service": {
                "name": "systemd-repart.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-rfkill.service": {
                "name": "systemd-rfkill.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-soft-reboot.service": {
                "name": "systemd-soft-reboot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-suspend-then-hibernate.service": {
                "name": "systemd-suspend-then-hibernate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-suspend.service": {
                "name": "systemd-suspend.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-sysctl.service": {
                "name": "systemd-sysctl.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-sysext.service": {
                "name": "systemd-sysext.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-sysext@.service": {
                "name": "systemd-sysext@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-sysupdate-reboot.service": {
                "name": "systemd-sysupdate-reboot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "systemd-sysupdate.service": {
                "name": "systemd-sysupdate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "systemd-sysusers.service": {
                "name": "systemd-sysusers.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-timedated.service": {
                "name": "systemd-timedated.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-timesyncd.service": {
                "name": "systemd-timesyncd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "systemd-tmpfiles-clean.service": {
                "name": "systemd-tmpfiles-clean.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup-dev-early.service": {
                "name": "systemd-tmpfiles-setup-dev-early.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup-dev.service": {
                "name": "systemd-tmpfiles-setup-dev.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup.service": {
                "name": "systemd-tmpfiles-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tpm2-setup-early.service": {
                "name": "systemd-tpm2-setup-early.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tpm2-setup.service": {
                "name": "systemd-tpm2-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udev-load-credentials.service": {
                "name": "systemd-udev-load-credentials.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "systemd-udev-settle.service": {
                "name": "systemd-udev-settle.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udev-trigger.service": {
                "name": "systemd-udev-trigger.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udevd.service": {
                "name": "systemd-udevd.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-update-done.service": {
                "name": "systemd-update-done.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-update-utmp-runlevel.service": {
                "name": "systemd-update-utmp-runlevel.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-update-utmp.service": {
                "name": "systemd-update-utmp.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-user-sessions.service": {
                "name": "systemd-user-sessions.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-vconsole-setup.service": {
                "name": "systemd-vconsole-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-volatile-root.service": {
                "name": "systemd-volatile-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "target.service": {
                "name": "target.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "targetclid.service": {
                "name": "targetclid.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "user-runtime-dir@.service": {
                "name": "user-runtime-dir@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "user-runtime-dir@0.service": {
                "name": "user-runtime-dir@0.service",
                "source": "systemd",
                "state": "stopped",
                "status": "active"
            },
            "user@.service": {
                "name": "user@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "user@0.service": {
                "name": "user@0.service",
                "source": "systemd",
                "state": "running",
                "status": "active"
            },
            "ypbind.service": {
                "name": "ypbind.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            }
        }
    },
    "changed": false
}
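
The service inventory that ends above is the tail of the role's "Get service facts" step; it comes from the stock service_facts module, essentially:

    - name: Get service facts
      ansible.builtin.service_facts:

The resulting ansible_facts.services map (unit name -> source/state/status) is what the next task scans for cryptsetup units.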

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Saturday 02 November 2024  19:25:20 -0400 (0:00:02.232)       0:00:07.151 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}
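
storage_cryptsetup_services ends up empty because no systemd-cryptsetup@*.service units appear in the facts above. A minimal sketch of the kind of filter that produces the list (the role's exact expression may differ):

    - name: Set storage_cryptsetup_services
      ansible.builtin.set_fact:
        storage_cryptsetup_services: >-
          {{ ansible_facts.services | dict2items
             | selectattr('key', 'match', '^systemd-cryptsetup@')
             | map(attribute='key') | list }}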

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Saturday 02 November 2024  19:25:21 -0400 (0:00:00.053)       0:00:07.204 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}
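
With an empty list the mask step has nothing to loop over. When units are found, the role masks them so a systemd-triggered cryptsetup cannot race the blivet operations; a sketch using the standard systemd module:

    - name: Mask the systemd cryptsetup services
      ansible.builtin.systemd:
        name: "{{ item }}"
        masked: true
      loop: "{{ storage_cryptsetup_services }}"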

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Saturday 02 November 2024  19:25:21 -0400 (0:00:00.016)       0:00:07.221 ***** 
ok: [managed-node2] => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [],
    "mounts": [],
    "packages": [],
    "pools": [],
    "volumes": []
}
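
This first pass through the blivet provider was invoked with no storage_pools or storage_volumes (the play's initial "Run the role" include), so the module reports empty actions, mounts, and packages. As far as this log shows, the no-op invocation is just:

    - name: Run the role
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.storage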

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Saturday 02 November 2024  19:25:21 -0400 (0:00:00.585)       0:00:07.807 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Saturday 02 November 2024  19:25:21 -0400 (0:00:00.058)       0:00:07.866 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730589314.140663,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "218269ed484ab71984ca70eb56e2318b37e9204e",
        "ctime": 1730589309.768637,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 541065418,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1730589309.768637,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1366,
        "uid": 0,
        "version": "269688944",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
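
The stat payload above maps one-to-one onto ansible.builtin.stat output; the task is effectively the following (the register name is an assumption):

    - name: Check if /etc/fstab is present
      ansible.builtin.stat:
        path: /etc/fstab
      register: __storage_fstab_stat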

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Saturday 02 November 2024  19:25:22 -0400 (0:00:00.403)       0:00:08.270 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "blivet_output is changed",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Saturday 02 November 2024  19:25:22 -0400 (0:00:00.023)       0:00:08.293 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Saturday 02 November 2024  19:25:22 -0400 (0:00:00.015)       0:00:08.309 ***** 
ok: [managed-node2] => {
    "blivet_output": {
        "actions": [],
        "changed": false,
        "crypts": [],
        "failed": false,
        "leaves": [],
        "mounts": [],
        "packages": [],
        "pools": [],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Saturday 02 November 2024  19:25:22 -0400 (0:00:00.022)       0:00:08.332 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Saturday 02 November 2024  19:25:22 -0400 (0:00:00.020)       0:00:08.352 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Saturday 02 November 2024  19:25:22 -0400 (0:00:00.022)       0:00:08.375 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Saturday 02 November 2024  19:25:22 -0400 (0:00:00.035)       0:00:08.410 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "blivet_output['mounts']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Saturday 02 November 2024  19:25:22 -0400 (0:00:00.018)       0:00:08.429 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Saturday 02 November 2024  19:25:22 -0400 (0:00:00.036)       0:00:08.466 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Saturday 02 November 2024  19:25:22 -0400 (0:00:00.036)       0:00:08.503 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "blivet_output['mounts']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Saturday 02 November 2024  19:25:22 -0400 (0:00:00.018)       0:00:08.522 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730589811.9926362,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1730589808.2226133,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 675283161,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1730589808.2243166,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "2749971072",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Saturday 02 November 2024  19:25:22 -0400 (0:00:00.401)       0:00:08.923 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Saturday 02 November 2024  19:25:22 -0400 (0:00:00.030)       0:00:08.954 ***** 
ok: [managed-node2]

TASK [Mark tasks to be skipped] ************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_fs.yml:18
Saturday 02 November 2024  19:25:23 -0400 (0:00:01.016)       0:00:09.971 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_skip_checks": [
            "blivet_available",
            "packages_installed",
            "service_facts"
        ]
    },
    "changed": false
}
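
storage_skip_checks short-circuits the role's setup work on the remaining runs in this play. The guarded tasks test it exactly as the false_condition strings below show; for example (the when clause is verbatim from the log, the package body is an assumption):

    - name: Make sure blivet is available
      ansible.builtin.package:
        name: "{{ blivet_package_list }}"
        state: present
      when: >-
        storage_skip_checks is not defined or
        not "blivet_available" in storage_skip_checks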

TASK [Get unused disks] ********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_fs.yml:25
Saturday 02 November 2024  19:25:23 -0400 (0:00:00.041)       0:00:10.012 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml for managed-node2

TASK [Ensure test packages] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:2
Saturday 02 November 2024  19:25:23 -0400 (0:00:00.034)       0:00:10.047 ***** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: util-linux-core

TASK [Find unused disks in the system] *****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:11
Saturday 02 November 2024  19:25:24 -0400 (0:00:00.754)       0:00:10.802 ***** 
ok: [managed-node2] => {
    "changed": false,
    "disks": [
        "sda"
    ],
    "info": [
        "Line: NAME=\"/dev/sda\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdb\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdc\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdd\" TYPE=\"disk\" SIZE=\"1099511627776\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sde\" TYPE=\"disk\" SIZE=\"1099511627776\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdf\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdg\" TYPE=\"disk\" SIZE=\"1099511627776\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdh\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdi\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/xvda\" TYPE=\"disk\" SIZE=\"268435456000\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/xvda1\" TYPE=\"part\" SIZE=\"1048576\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line type [part] is not disk: NAME=\"/dev/xvda1\" TYPE=\"part\" SIZE=\"1048576\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/xvda2\" TYPE=\"part\" SIZE=\"268433341952\" FSTYPE=\"xfs\" LOG-SEC=\"512\"",
        "Line type [part] is not disk: NAME=\"/dev/xvda2\" TYPE=\"part\" SIZE=\"268433341952\" FSTYPE=\"xfs\" LOG-SEC=\"512\"",
        "filename [xvda2] is a partition",
        "filename [xvda1] is a partition",
        "Disk [/dev/xvda] attrs [{'type': 'disk', 'size': '268435456000', 'fstype': '', 'ssize': '512'}] has partitions"
    ]
}
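
The Line: entries are lsblk pair-format records; the probe behind them is presumably something like the command below. Partitions and disks that carry partitions (here /dev/xvda) are rejected, and only sda is returned, apparently because the test asks for a single disk.

    - name: Inspect candidate disks   # reconstruction of the probe, not the test's own module
      ansible.builtin.command:
        cmd: lsblk -p --pairs --bytes -o NAME,TYPE,SIZE,FSTYPE,LOG-SEC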

TASK [Debug why there are no unused disks] *************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:20
Saturday 02 November 2024  19:25:25 -0400 (0:00:00.553)       0:00:11.355 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "'Unable to find unused disk' in unused_disks_return.disks",
    "skip_reason": "Conditional result was False"
}

TASK [Set unused_disks if necessary] *******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:29
Saturday 02 November 2024  19:25:25 -0400 (0:00:00.032)       0:00:11.387 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "unused_disks": [
            "sda"
        ]
    },
    "changed": false
}

TASK [Exit playbook when there aren't enough unused disks in the system] *******
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:34
Saturday 02 November 2024  19:25:25 -0400 (0:00:00.037)       0:00:11.425 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "unused_disks | d([]) | length < disks_needed | d(1)",
    "skip_reason": "Conditional result was False"
}

TASK [Print unused disks] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:39
Saturday 02 November 2024  19:25:25 -0400 (0:00:00.069)       0:00:11.494 ***** 
ok: [managed-node2] => {
    "unused_disks": [
        "sda"
    ]
}

TASK [Create a LVM logical volume with default fs_type] ************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_fs.yml:31
Saturday 02 November 2024  19:25:25 -0400 (0:00:00.030)       0:00:11.525 ***** 
included: fedora.linux_system_roles.storage for managed-node2
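
Judging from the "Show storage_pools" debug a few tasks below, the invocation behind this include is, in essence (fs_type is deliberately omitted so the role default, xfs here, applies):

    - name: Create a LVM logical volume with default fs_type
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            disks: "{{ unused_disks }}"
            volumes:
              - name: test1
                size: 5g
                mount_point: /opt/test1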

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Saturday 02 November 2024  19:25:25 -0400 (0:00:00.046)       0:00:11.571 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Saturday 02 November 2024  19:25:25 -0400 (0:00:00.032)       0:00:11.604 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Saturday 02 November 2024  19:25:25 -0400 (0:00:00.041)       0:00:11.645 ***** 
skipping: [managed-node2] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node2] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}
ok: [managed-node2] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}
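
RedHat.yml and CentOS.yml are skipped because those files do not exist under the role's vars/; CentOS_10.yml matches twice, most likely because the major-version and full-version file names resolve to the same file on CentOS Stream 10. The lookup pattern, reconstructed from the "__vars_file is file" condition above (loop items inferred, variable names assumed):

    - name: Set platform/version specific variables
      ansible.builtin.include_vars:
        file: "{{ __vars_file }}"
      vars:
        __vars_file: "{{ role_path }}/vars/{{ item }}"
      when: __vars_file is file
      loop:
        - "{{ ansible_facts['os_family'] }}.yml"
        - "{{ ansible_facts['distribution'] }}.yml"
        - "{{ ansible_facts['distribution'] }}_{{ ansible_facts['distribution_major_version'] }}.yml"
        - "{{ ansible_facts['distribution'] }}_{{ ansible_facts['distribution_version'] }}.yml"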

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Saturday 02 November 2024  19:25:25 -0400 (0:00:00.050)       0:00:11.696 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Saturday 02 November 2024  19:25:25 -0400 (0:00:00.023)       0:00:11.720 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Saturday 02 November 2024  19:25:25 -0400 (0:00:00.022)       0:00:11.742 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Saturday 02 November 2024  19:25:25 -0400 (0:00:00.022)       0:00:11.765 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Saturday 02 November 2024  19:25:25 -0400 (0:00:00.019)       0:00:11.784 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Saturday 02 November 2024  19:25:25 -0400 (0:00:00.052)       0:00:11.837 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Saturday 02 November 2024  19:25:25 -0400 (0:00:00.036)       0:00:11.873 ***** 
ok: [managed-node2] => {
    "storage_pools": [
        {
            "disks": [
                "sda"
            ],
            "name": "foo",
            "volumes": [
                {
                    "mount_point": "/opt/test1",
                    "name": "test1",
                    "size": "5g"
                }
            ]
        }
    ]
}

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Saturday 02 November 2024  19:25:25 -0400 (0:00:00.046)       0:00:11.920 ***** 
ok: [managed-node2] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}
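
The "VARIABLE IS NOT DEFINED!" output is expected rather than an error: this test defines only storage_pools, and the debug task simply prints whatever (if anything) was passed for storage_volumes.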

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Saturday 02 November 2024  19:25:25 -0400 (0:00:00.023)       0:00:11.943 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Saturday 02 November 2024  19:25:25 -0400 (0:00:00.024)       0:00:11.967 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Saturday 02 November 2024  19:25:25 -0400 (0:00:00.024)       0:00:11.992 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Saturday 02 November 2024  19:25:25 -0400 (0:00:00.023)       0:00:12.015 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Saturday 02 November 2024  19:25:25 -0400 (0:00:00.023)       0:00:12.038 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Saturday 02 November 2024  19:25:25 -0400 (0:00:00.061)       0:00:12.100 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Saturday 02 November 2024  19:25:25 -0400 (0:00:00.025)       0:00:12.125 ***** 
changed: [managed-node2] => {
    "actions": [
        {
            "action": "create format",
            "device": "/dev/sda",
            "fs_type": "lvmpv"
        },
        {
            "action": "create device",
            "device": "/dev/foo",
            "fs_type": null
        },
        {
            "action": "create device",
            "device": "/dev/mapper/foo-test1",
            "fs_type": null
        },
        {
            "action": "create format",
            "device": "/dev/mapper/foo-test1",
            "fs_type": "xfs"
        }
    ],
    "changed": true,
    "crypts": [],
    "leaves": [
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1",
        "/dev/xvda2",
        "/dev/mapper/foo-test1"
    ],
    "mounts": [
        {
            "dump": 0,
            "fstype": "xfs",
            "group": null,
            "mode": null,
            "opts": "defaults",
            "owner": null,
            "passno": 0,
            "path": "/opt/test1",
            "src": "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c",
            "state": "mounted"
        }
    ],
    "packages": [
        "xfsprogs",
        "lvm2"
    ],
    "pools": [
        {
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/foo-test1",
                    "_kernel_device": "/dev/dm-0",
                    "_mount_id": "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c",
                    "_raw_device": "/dev/mapper/foo-test1",
                    "_raw_kernel_device": "/dev/dm-0",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "5g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        }
    ],
    "volumes": []
}
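
The four actions correspond to the classic manual sequence pvcreate /dev/sda, vgcreate foo /dev/sda, lvcreate -L 5G -n test1 foo, mkfs.xfs /dev/mapper/foo-test1 (an approximation; blivet drives libblockdev rather than shelling out). The mounts entry then tells the follow-up tasks to add the UUID-based fstab line and mount /opt/test1, and packages lists what the operations require on the target.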

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Saturday 02 November 2024  19:25:28 -0400 (0:00:02.180)       0:00:14.306 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Saturday 02 November 2024  19:25:28 -0400 (0:00:00.038)       0:00:14.344 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730589314.140663,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "218269ed484ab71984ca70eb56e2318b37e9204e",
        "ctime": 1730589309.768637,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 541065418,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1730589309.768637,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1366,
        "uid": 0,
        "version": "269688944",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Saturday 02 November 2024  19:25:28 -0400 (0:00:00.388)       0:00:14.733 ***** 
ok: [managed-node2] => {
    "backup": "",
    "changed": false
}
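
On this changed run the fingerprint task actually executed; the bare "backup": "" is a lineinfile-style return. A sketch, assuming the role stamps /etc/fstab with a marker comment (marker text and module are assumptions; the when clause matches the skip condition seen earlier):

    - name: Add fingerprint to /etc/fstab if present
      ansible.builtin.lineinfile:
        path: /etc/fstab
        line: "# system_role:storage"   # marker text is an assumption
        insertafter: EOF
      when: blivet_output is changed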

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Saturday 02 November 2024  19:25:29 -0400 (0:00:00.486)       0:00:15.220 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Saturday 02 November 2024  19:25:29 -0400 (0:00:00.020)       0:00:15.240 ***** 
ok: [managed-node2] => {
    "blivet_output": {
        "actions": [
            {
                "action": "create format",
                "device": "/dev/sda",
                "fs_type": "lvmpv"
            },
            {
                "action": "create device",
                "device": "/dev/foo",
                "fs_type": null
            },
            {
                "action": "create device",
                "device": "/dev/mapper/foo-test1",
                "fs_type": null
            },
            {
                "action": "create format",
                "device": "/dev/mapper/foo-test1",
                "fs_type": "xfs"
            }
        ],
        "changed": true,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1",
            "/dev/xvda2",
            "/dev/mapper/foo-test1"
        ],
        "mounts": [
            {
                "dump": 0,
                "fstype": "xfs",
                "group": null,
                "mode": null,
                "opts": "defaults",
                "owner": null,
                "passno": 0,
                "path": "/opt/test1",
                "src": "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c",
                "state": "mounted"
            }
        ],
        "packages": [
            "xfsprogs",
            "lvm2"
        ],
        "pools": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/foo-test1",
                        "_kernel_device": "/dev/dm-0",
                        "_mount_id": "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c",
                        "_raw_device": "/dev/mapper/foo-test1",
                        "_raw_kernel_device": "/dev/dm-0",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "5g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Saturday 02 November 2024  19:25:29 -0400 (0:00:00.026)       0:00:15.266 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/foo-test1",
                        "_kernel_device": "/dev/dm-0",
                        "_mount_id": "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c",
                        "_raw_device": "/dev/mapper/foo-test1",
                        "_raw_kernel_device": "/dev/dm-0",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "5g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Saturday 02 November 2024  19:25:29 -0400 (0:00:00.023)       0:00:15.290 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Saturday 02 November 2024  19:25:29 -0400 (0:00:00.022)       0:00:15.312 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Saturday 02 November 2024  19:25:29 -0400 (0:00:00.038)       0:00:15.351 ***** 
ok: [managed-node2] => {
    "changed": false,
    "name": null,
    "status": {}
}
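
The empty name/status pair is characteristic of a daemon_reload-only call to the systemd module, i.e.:

    - name: Tell systemd to refresh its view of /etc/fstab
      ansible.builtin.systemd:
        daemon_reload: true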

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Saturday 02 November 2024  19:25:30 -0400 (0:00:00.974)       0:00:16.326 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
changed: [managed-node2] => (item={'src': 'UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "xfs",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c",
        "state": "mounted"
    },
    "name": "/opt/test1",
    "opts": "defaults",
    "passno": "0",
    "src": "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c"
}
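
The redirect lines confirm the mount is applied with ansible.posix.mount; reconstructed approximately from the loop item above (the mount_info loop variable is verbatim from the log):

    - name: Set up new/current mounts
      ansible.posix.mount:
        src: "{{ mount_info['src'] }}"
        path: "{{ mount_info['path'] }}"
        fstype: "{{ mount_info['fstype'] }}"
        opts: "{{ mount_info['opts'] }}"
        state: "{{ mount_info['state'] }}"
      loop: "{{ blivet_output.mounts }}"
      loop_control:
        loop_var: mount_info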

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Saturday 02 November 2024  19:25:30 -0400 (0:00:00.574)       0:00:16.901 ***** 
skipping: [managed-node2] => (item={'src': 'UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None})  => {
    "ansible_loop_var": "mount_info",
    "changed": false,
    "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c",
        "state": "mounted"
    },
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Saturday 02 November 2024  19:25:30 -0400 (0:00:00.078)       0:00:16.979 ***** 
ok: [managed-node2] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Saturday 02 November 2024  19:25:31 -0400 (0:00:00.802)       0:00:17.782 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730589811.9926362,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1730589808.2226133,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 675283161,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1730589808.2243166,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "2749971072",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
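
size 0 plus checksum da39a3ee5e6b4b0d3255bfef95601890afd80709 (the SHA-1 of empty input) means /etc/crypttab exists but is empty, consistent with the empty crypts list from blivet, so the crypttab-management task below has nothing to do.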

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Saturday 02 November 2024  19:25:31 -0400 (0:00:00.380)       0:00:18.163 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Saturday 02 November 2024  19:25:32 -0400 (0:00:00.019)       0:00:18.182 ***** 
ok: [managed-node2]
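
The roughly two-second "Update facts" step re-gathers facts so the verification tasks below see the new device and the /opt/test1 mount; it is effectively a plain fact refresh:

    - name: Update facts
      ansible.builtin.setup: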

TASK [Verify role results] *****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_fs.yml:43
Saturday 02 November 2024  19:25:34 -0400 (0:00:01.987)       0:00:20.169 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node2

TASK [Print out pool information] **********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Saturday 02 November 2024  19:25:34 -0400 (0:00:00.056)       0:00:20.226 ***** 
ok: [managed-node2] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/foo-test1",
                    "_kernel_device": "/dev/dm-0",
                    "_mount_id": "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c",
                    "_raw_device": "/dev/mapper/foo-test1",
                    "_raw_kernel_device": "/dev/dm-0",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "5g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        }
    ]
}
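
Note: for orientation, the pool facts above correspond to an input roughly like the following, reconstructed from the printed values (every other field shown is a role default):

    storage_pools:
      - name: foo
        disks:
          - sda
        volumes:
          - name: test1
            size: 5g
            fs_type: xfs
            mount_point: /opt/test1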

TASK [Print out volume information] ********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Saturday 02 November 2024  19:25:34 -0400 (0:00:00.072)       0:00:20.298 ***** 
skipping: [managed-node2] => {
    "false_condition": "_storage_volumes_list | length > 0"
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Saturday 02 November 2024  19:25:34 -0400 (0:00:00.059)       0:00:20.358 ***** 
ok: [managed-node2] => {
    "changed": false,
    "info": {
        "/dev/mapper/foo-test1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/opt/test1",
            "name": "/dev/mapper/foo-test1",
            "size": "5G",
            "type": "lvm",
            "uuid": "c454eb62-a0eb-4a79-89bf-0d471375a06c"
        },
        "/dev/sda": {
            "fstype": "LVM2_member",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": "xOHC09-8dIW-nh4C-0hDX-JdU0-78e6-Sh8o2o"
        },
        "/dev/sdb": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda1",
            "size": "1M",
            "type": "partition",
            "uuid": ""
        },
        "/dev/xvda2": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda2",
            "size": "250G",
            "type": "partition",
            "uuid": "70689318-97f1-4727-890d-5d50652a95d6"
        }
    }
}
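
Note: the info mapping above records fstype, label, mountpoint, size, type, and uuid per block device; the same data can be gathered ad hoc with lsblk, as in this sketch (an equivalent one-off task, not the helper the test actually uses):

    - name: Collect block device info with lsblk
      ansible.builtin.command:
        cmd: lsblk -o NAME,FSTYPE,LABEL,MOUNTPOINT,SIZE,TYPE,UUID
      register: lsblk_out
      changed_when: false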

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Saturday 02 November 2024  19:25:34 -0400 (0:00:00.533)       0:00:20.892 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.003203",
    "end": "2024-11-02 19:25:35.129359",
    "rc": 0,
    "start": "2024-11-02 19:25:35.126156"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Wed Oct 30 09:04:14 2024
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=70689318-97f1-4727-890d-5d50652a95d6 /                       xfs     defaults        0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c /opt/test1 xfs defaults 0 0
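
Note: the '# system_role:storage' header marks the file as role-managed; the NFS entries predate the test, and the role-added entry for the test volume is the final UUID=c454eb62-... line. The read itself is a plain command task, roughly as follows (storage_test_fstab is an assumed register name, reused in the fstab-matching sketch further below):

    - name: Read the /etc/fstab file for volume existence
      ansible.builtin.command:
        cmd: cat /etc/fstab
      register: storage_test_fstab
      changed_when: false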

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Saturday 02 November 2024  19:25:35 -0400 (0:00:00.463)       0:00:21.355 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.003215",
    "end": "2024-11-02 19:25:35.504619",
    "failed_when_result": false,
    "rc": 0,
    "start": "2024-11-02 19:25:35.501404"
}

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Saturday 02 November 2024  19:25:35 -0400 (0:00:00.375)       0:00:21.730 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node2 => (item={'disks': ['sda'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'lvm', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '5g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': 'UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'}]})

TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Saturday 02 November 2024  19:25:35 -0400 (0:00:00.067)       0:00:21.797 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Saturday 02 November 2024  19:25:35 -0400 (0:00:00.021)       0:00:21.818 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "vgs",
        "--noheadings",
        "--binary",
        "-o",
        "shared",
        "foo"
    ],
    "delta": "0:00:00.027059",
    "end": "2024-11-02 19:25:35.995872",
    "rc": 0,
    "start": "2024-11-02 19:25:35.968813"
}

STDOUT:

        0

TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Saturday 02 November 2024  19:25:36 -0400 (0:00:00.407)       0:00:22.226 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed
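
Note: vgs --binary prints 0 or 1, so the assertion reduces to comparing that output with the pool's shared flag (false here, matching the 0 above). A sketch of such a check, assuming the vgs result was registered as vg_shared:

    - name: Verify that VG shared value checks out
      ansible.builtin.assert:
        that:
          - (vg_shared.stdout | trim == '1') == (storage_test_pool.shared | bool)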

TASK [Verify pool subset] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34
Saturday 02 November 2024  19:25:36 -0400 (0:00:00.027)       0:00:22.253 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node2 => (item=members)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node2 => (item=volumes)

TASK [Set test variables] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2
Saturday 02 November 2024  19:25:36 -0400 (0:00:00.049)       0:00:22.302 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_pv_count": "1",
        "_storage_test_pool_pvs_lvm": [
            "/dev/sda"
        ]
    },
    "changed": false
}

TASK [Get the canonical device path for each member device] ********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8
Saturday 02 November 2024  19:25:36 -0400 (0:00:00.049)       0:00:22.352 ***** 
ok: [managed-node2] => (item=/dev/sda) => {
    "ansible_loop_var": "pv",
    "changed": false,
    "device": "/dev/sda",
    "pv": "/dev/sda"
}

TASK [Set pvs lvm length] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17
Saturday 02 November 2024  19:25:36 -0400 (0:00:00.454)       0:00:22.806 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "__pvs_lvm_len": "1"
    },
    "changed": false
}

TASK [Set pool pvs] ************************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22
Saturday 02 November 2024  19:25:36 -0400 (0:00:00.044)       0:00:22.851 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_pool_pvs": [
            "/dev/sda"
        ]
    },
    "changed": false
}

TASK [Verify PV count] *********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27
Saturday 02 November 2024  19:25:36 -0400 (0:00:00.047)       0:00:22.898 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36
Saturday 02 November 2024  19:25:36 -0400 (0:00:00.043)       0:00:22.942 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_pv_type": "disk"
    },
    "changed": false
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41
Saturday 02 November 2024  19:25:36 -0400 (0:00:00.026)       0:00:22.969 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_pv_type": "disk"
    },
    "changed": false
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46
Saturday 02 November 2024  19:25:36 -0400 (0:00:00.062)       0:00:23.032 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level",
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:51
Saturday 02 November 2024  19:25:36 -0400 (0:00:00.029)       0:00:23.061 ***** 
ok: [managed-node2] => (item=/dev/sda) => {
    "ansible_loop_var": "pv",
    "changed": false,
    "pv": "/dev/sda"
}

MSG:

All assertions passed

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:64
Saturday 02 November 2024  19:25:36 -0400 (0:00:00.034)       0:00:23.095 ***** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0
}

STDOUT:

True



STDERR:

OpenSSH_9.9p1, OpenSSL 3.2.2 4 Jun 2024
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.41.29 originally 10.31.41.29
debug2: match not found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: configuration requests final Match pass
debug2: resolve_canonicalize: hostname 10.31.41.29 is address
debug1: re-parsing configuration
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.41.29 originally 10.31.41.29
debug2: match found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: auto-mux: Trying existing master at '/root/.ansible/cp/eccdcee41b'
debug2: fd 3 setting O_NONBLOCK
debug2: mux_client_hello_exchange: master version 4
debug1: mux_client_request_session: master session id: 2
debug2: Received exit status from master 0
Shared connection to 10.31.41.29 closed.


TASK [Verify that PVs fill the whole devices when they should] *****************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:73
Saturday 02 November 2024  19:25:37 -0400 (0:00:00.415)       0:00:23.511 ***** 
skipping: [managed-node2] => (item=/dev/sda)  => {
    "ansible_loop_var": "st_pool_pv",
    "changed": false,
    "false_condition": "storage_test_pool.grow_to_fill | bool",
    "skip_reason": "Conditional result was False",
    "st_pool_pv": "/dev/sda"
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:83
Saturday 02 November 2024  19:25:37 -0400 (0:00:00.062)       0:00:23.573 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node2

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Saturday 02 November 2024  19:25:37 -0400 (0:00:00.074)       0:00:23.648 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Saturday 02 November 2024  19:25:37 -0400 (0:00:00.027)       0:00:23.675 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Saturday 02 November 2024  19:25:37 -0400 (0:00:00.024)       0:00:23.700 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Saturday 02 November 2024  19:25:37 -0400 (0:00:00.022)       0:00:23.722 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Saturday 02 November 2024  19:25:37 -0400 (0:00:00.022)       0:00:23.745 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Saturday 02 November 2024  19:25:37 -0400 (0:00:00.019)       0:00:23.765 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Saturday 02 November 2024  19:25:37 -0400 (0:00:00.019)       0:00:23.785 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55
Saturday 02 November 2024  19:25:37 -0400 (0:00:00.019)       0:00:23.804 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64
Saturday 02 November 2024  19:25:37 -0400 (0:00:00.044)       0:00:23.849 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74
Saturday 02 November 2024  19:25:37 -0400 (0:00:00.021)       0:00:23.870 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83
Saturday 02 November 2024  19:25:37 -0400 (0:00:00.019)       0:00:23.890 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}

TASK [Check LVM RAID] **********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:86
Saturday 02 November 2024  19:25:37 -0400 (0:00:00.021)       0:00:23.912 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node2

TASK [Validate pool member LVM RAID settings] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Saturday 02 November 2024  19:25:37 -0400 (0:00:00.045)       0:00:23.957 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '5g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': 'UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'})

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:8
Saturday 02 November 2024  19:25:37 -0400 (0:00:00.065)       0:00:24.023 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:16
Saturday 02 November 2024  19:25:37 -0400 (0:00:00.037)       0:00:24.060 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:20
Saturday 02 November 2024  19:25:37 -0400 (0:00:00.038)       0:00:24.098 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV stripe size] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:27
Saturday 02 November 2024  19:25:37 -0400 (0:00:00.037)       0:00:24.136 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested stripe size] *****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:31
Saturday 02 November 2024  19:25:38 -0400 (0:00:00.037)       0:00:24.174 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected stripe size] ************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:37
Saturday 02 November 2024  19:25:38 -0400 (0:00:00.037)       0:00:24.211 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Check stripe size] *******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:42
Saturday 02 November 2024  19:25:38 -0400 (0:00:00.031)       0:00:24.242 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Check Thin Pools] ********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:89
Saturday 02 November 2024  19:25:38 -0400 (0:00:00.040)       0:00:24.283 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node2

TASK [Validate pool member thinpool settings] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2
Saturday 02 November 2024  19:25:38 -0400 (0:00:00.077)       0:00:24.360 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '5g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': 'UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'})

TASK [Get information about thinpool] ******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:8
Saturday 02 November 2024  19:25:38 -0400 (0:00:00.062)       0:00:24.422 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_thin_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Check that volume is in correct thinpool (when thinp name is provided)] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:16
Saturday 02 November 2024  19:25:38 -0400 (0:00:00.033)       0:00:24.456 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_thin_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Check that volume is in thinpool (when thinp name is not provided)] ******
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:22
Saturday 02 November 2024  19:25:38 -0400 (0:00:00.032)       0:00:24.489 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_thin_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:26
Saturday 02 November 2024  19:25:38 -0400 (0:00:00.030)       0:00:24.519 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_thin_status": null
    },
    "changed": false
}

TASK [Check member encryption] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:92
Saturday 02 November 2024  19:25:38 -0400 (0:00:00.036)       0:00:24.556 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node2

TASK [Set test variables] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Saturday 02 November 2024  19:25:38 -0400 (0:00:00.073)       0:00:24.629 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Saturday 02 November 2024  19:25:38 -0400 (0:00:00.049)       0:00:24.678 ***** 
skipping: [managed-node2] => (item=/dev/sda)  => {
    "_storage_test_pool_member_path": "/dev/sda",
    "ansible_loop_var": "_storage_test_pool_member_path",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Saturday 02 November 2024  19:25:38 -0400 (0:00:00.026)       0:00:24.705 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml for managed-node2 => (item=/dev/sda)

TASK [Set variables used by tests] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:2
Saturday 02 November 2024  19:25:38 -0400 (0:00:00.039)       0:00:24.744 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": []
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:6
Saturday 02 November 2024  19:25:38 -0400 (0:00:00.043)       0:00:24.788 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:14
Saturday 02 November 2024  19:25:38 -0400 (0:00:00.041)       0:00:24.830 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:23
Saturday 02 November 2024  19:25:38 -0400 (0:00:00.038)       0:00:24.868 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "false and _storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:32
Saturday 02 November 2024  19:25:38 -0400 (0:00:00.042)       0:00:24.910 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:41
Saturday 02 November 2024  19:25:38 -0400 (0:00:00.038)       0:00:24.949 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null
    },
    "changed": false
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Saturday 02 November 2024  19:25:38 -0400 (0:00:00.021)       0:00:24.971 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:95
Saturday 02 November 2024  19:25:38 -0400 (0:00:00.020)       0:00:24.992 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node2

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Saturday 02 November 2024  19:25:38 -0400 (0:00:00.045)       0:00:25.037 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '5g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': 'UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'})

TASK [Get information about VDO deduplication] *********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:8
Saturday 02 November 2024  19:25:38 -0400 (0:00:00.043)       0:00:25.081 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO deduplication is off] ***************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:15
Saturday 02 November 2024  19:25:38 -0400 (0:00:00.020)       0:00:25.101 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO deduplication is on] ****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:21
Saturday 02 November 2024  19:25:38 -0400 (0:00:00.020)       0:00:25.122 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about VDO compression] ***********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:27
Saturday 02 November 2024  19:25:38 -0400 (0:00:00.019)       0:00:25.142 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO compression is off] *****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:34
Saturday 02 November 2024  19:25:38 -0400 (0:00:00.021)       0:00:25.163 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO compression is on] ******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:40
Saturday 02 November 2024  19:25:39 -0400 (0:00:00.019)       0:00:25.183 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:46
Saturday 02 November 2024  19:25:39 -0400 (0:00:00.021)       0:00:25.204 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_vdo_status": null
    },
    "changed": false
}

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:98
Saturday 02 November 2024  19:25:39 -0400 (0:00:00.020)       0:00:25.225 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node2

TASK [Run 'stratis report'] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Saturday 02 November 2024  19:25:39 -0400 (0:00:00.057)       0:00:25.282 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:11
Saturday 02 November 2024  19:25:39 -0400 (0:00:00.023)       0:00:25.306 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the pool was created] ****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Saturday 02 November 2024  19:25:39 -0400 (0:00:00.031)       0:00:25.337 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:25
Saturday 02 November 2024  19:25:39 -0400 (0:00:00.050)       0:00:25.387 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:34
Saturday 02 November 2024  19:25:39 -0400 (0:00:00.030)       0:00:25.418 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:44
Saturday 02 November 2024  19:25:39 -0400 (0:00:00.020)       0:00:25.439 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:101
Saturday 02 November 2024  19:25:39 -0400 (0:00:00.025)       0:00:25.464 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Saturday 02 November 2024  19:25:39 -0400 (0:00:00.030)       0:00:25.495 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '5g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': 'UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'})

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Saturday 02 November 2024  19:25:39 -0400 (0:00:00.058)       0:00:25.554 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_volume_present": true,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}
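
Note: the _storage_volume_tests list above drives the next task, which includes test-verify-volume-<subset>.yml once per entry; that is why the raw task name below still shows the unrendered {{ storage_test_volume_subset }} loop variable. The dispatch pattern is roughly:

    - name: Run test verify for {{ storage_test_volume_subset }}
      ansible.builtin.include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
      loop: "{{ _storage_volume_tests }}"
      loop_control:
        loop_var: storage_test_volume_subset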

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Saturday 02 November 2024  19:25:39 -0400 (0:00:00.068)       0:00:25.622 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node2 => (item=mount)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node2 => (item=fstab)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node2 => (item=fs)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node2 => (item=device)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node2 => (item=encryption)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node2 => (item=md)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node2 => (item=size)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node2 => (item=cache)

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Saturday 02 November 2024  19:25:39 -0400 (0:00:00.157)       0:00:25.780 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/mapper/foo-test1"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Saturday 02 November 2024  19:25:39 -0400 (0:00:00.031)       0:00:25.811 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "/opt/test1",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Saturday 02 November 2024  19:25:39 -0400 (0:00:00.045)       0:00:25.856 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Saturday 02 November 2024  19:25:39 -0400 (0:00:00.021)       0:00:25.878 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed
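
Note: with the volume present, this check passes when the device's entry in the gathered mount facts points at the expected mount point set a few tasks earlier. One way to express it (a sketch, not the test's literal task):

    - name: Verify the current mount state by device
      ansible.builtin.assert:
        that:
          - >-
            ansible_facts.mounts
            | selectattr('device', 'equalto', storage_test_device_path)
            | selectattr('mount', 'equalto', storage_test_mount_expected_mount_point)
            | list | length == 1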

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Saturday 02 November 2024  19:25:39 -0400 (0:00:00.027)       0:00:25.906 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Saturday 02 November 2024  19:25:39 -0400 (0:00:00.021)       0:00:25.927 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Saturday 02 November 2024  19:25:39 -0400 (0:00:00.027)       0:00:25.955 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode",
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Saturday 02 November 2024  19:25:39 -0400 (0:00:00.029)       0:00:25.985 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Saturday 02 November 2024  19:25:39 -0400 (0:00:00.028)       0:00:26.013 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Saturday 02 November 2024  19:25:39 -0400 (0:00:00.028)       0:00:26.041 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Saturday 02 November 2024  19:25:39 -0400 (0:00:00.026)       0:00:26.068 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Saturday 02 November 2024  19:25:39 -0400 (0:00:00.025)       0:00:26.093 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "1",
        "storage_test_fstab_expected_mount_options_matches": "1",
        "storage_test_fstab_expected_mount_point_matches": "1",
        "storage_test_fstab_id_matches": [
            "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c "
        ],
        "storage_test_fstab_mount_options_matches": [
            " /opt/test1 xfs defaults "
        ],
        "storage_test_fstab_mount_point_matches": [
            " /opt/test1 "
        ]
    },
    "changed": false
}
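
The three *_matches lists above come from pattern-matching the volume's mount id, mount point, and mount options against the raw contents of /etc/fstab; the assertions that follow compare each list's length to the expected "1". A minimal sketch of that derivation, assuming storage_test_fstab holds the output of an earlier task that read the file (the variable names and exact patterns here are illustrative, not the test's code):

    - name: Set some variables for fstab checking (illustrative sketch)
      ansible.builtin.set_fact:
        # Lines whose source field is the volume's mount id (note the
        # trailing space, matching the "UUID=... " entry shown above)
        storage_test_fstab_id_matches: "{{ storage_test_fstab.stdout | regex_findall('^' ~ storage_test_volume._mount_id ~ ' ', multiline=True) }}"
        # Lines that mount something at the expected mount point
        storage_test_fstab_mount_point_matches: "{{ storage_test_fstab.stdout | regex_findall(' ' ~ storage_test_volume.mount_point ~ ' ', multiline=True) }}"

The mount-options list would be built the same way, with the fs type and options appended to the pattern.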

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Saturday 02 November 2024  19:25:40 -0400 (0:00:00.073)       0:00:26.167 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Saturday 02 November 2024  19:25:40 -0400 (0:00:00.049)       0:00:26.217 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Saturday 02 November 2024  19:25:40 -0400 (0:00:00.044)       0:00:26.261 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__storage_verify_mount_options | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Saturday 02 November 2024  19:25:40 -0400 (0:00:00.037)       0:00:26.299 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed
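
The fingerprint being verified is the comment marker the storage role writes into files it manages. Assuming the marker text is "# system_role:storage" (an assumption; the exact string does not appear in this log), the check reduces to something like:

    - name: Verify fingerprint (sketch; marker text assumed)
      ansible.builtin.assert:
        that:
          - "'# system_role:storage' in storage_test_fstab.stdout"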

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:51
Saturday 02 November 2024  19:25:40 -0400 (0:00:00.022)       0:00:26.321 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Saturday 02 November 2024  19:25:40 -0400 (0:00:00.023)       0:00:26.344 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Saturday 02 November 2024  19:25:40 -0400 (0:00:00.049)       0:00:26.394 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Saturday 02 November 2024  19:25:40 -0400 (0:00:00.058)       0:00:26.453 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730589928.016337,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "ctime": 1730589928.016337,
        "dev": 6,
        "device_type": 64768,
        "executable": false,
        "exists": true,
        "gid": 6,
        "gr_name": "disk",
        "inode": 5141,
        "isblk": true,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": false,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/symlink",
        "mode": "0660",
        "mtime": 1730589928.016337,
        "nlink": 1,
        "path": "/dev/mapper/foo-test1",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": null,
        "wgrp": true,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Saturday 02 November 2024  19:25:40 -0400 (0:00:00.392)       0:00:26.846 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed
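
A side note on the stat output above: /dev/mapper/foo-test1 is a symlink to the underlying dm device, which is why mimetype reads inode/symlink while isblk is true (the permission fields follow the link; the mimetype probe does not). The assertion that just passed presumably boils down to an existence-plus-type check along these lines, where storage_test_dev stands in for the register of the stat task and the when clause mirrors the inverse of the false_condition shown on the skipped twin task below:

    - name: Verify the presence/absence of the device node (sketch)
      ansible.builtin.assert:
        that:
          - storage_test_dev.stat.exists and storage_test_dev.stat.isblk
      when: _storage_test_volume_present or storage_test_volume.type == 'disk'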

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Saturday 02 November 2024  19:25:40 -0400 (0:00:00.029)       0:00:26.875 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Saturday 02 November 2024  19:25:40 -0400 (0:00:00.023)       0:00:26.899 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Saturday 02 November 2024  19:25:40 -0400 (0:00:00.030)       0:00:26.929 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "st_volume_type": "lvm"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Saturday 02 November 2024  19:25:40 -0400 (0:00:00.025)       0:00:26.955 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"raid\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Saturday 02 November 2024  19:25:40 -0400 (0:00:00.021)       0:00:26.977 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Saturday 02 November 2024  19:25:40 -0400 (0:00:00.026)       0:00:27.003 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Saturday 02 November 2024  19:25:40 -0400 (0:00:00.018)       0:00:27.022 ***** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: cryptsetup
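
"Nothing to do" is the package manager reporting that cryptsetup is already installed; the task behind this result is presumably a plain idempotent package install (sketch):

    - name: Ensure cryptsetup is present
      ansible.builtin.package:
        name: cryptsetup
        state: present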

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Saturday 02 November 2024  19:25:41 -0400 (0:00:00.704)       0:00:27.727 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Saturday 02 November 2024  19:25:41 -0400 (0:00:00.023)       0:00:27.750 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Saturday 02 November 2024  19:25:41 -0400 (0:00:00.019)       0:00:27.770 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Saturday 02 November 2024  19:25:41 -0400 (0:00:00.045)       0:00:27.815 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Saturday 02 November 2024  19:25:41 -0400 (0:00:00.019)       0:00:27.835 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Saturday 02 November 2024  19:25:41 -0400 (0:00:00.019)       0:00:27.855 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Saturday 02 November 2024  19:25:41 -0400 (0:00:00.019)       0:00:27.875 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Saturday 02 November 2024  19:25:41 -0400 (0:00:00.021)       0:00:27.896 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Saturday 02 November 2024  19:25:41 -0400 (0:00:00.020)       0:00:27.916 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Saturday 02 November 2024  19:25:41 -0400 (0:00:00.046)       0:00:27.963 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Saturday 02 November 2024  19:25:41 -0400 (0:00:00.044)       0:00:28.007 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Saturday 02 November 2024  19:25:41 -0400 (0:00:00.038)       0:00:28.046 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Saturday 02 November 2024  19:25:41 -0400 (0:00:00.050)       0:00:28.096 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Saturday 02 November 2024  19:25:42 -0400 (0:00:00.075)       0:00:28.172 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Saturday 02 November 2024  19:25:42 -0400 (0:00:00.067)       0:00:28.239 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Saturday 02 November 2024  19:25:42 -0400 (0:00:00.058)       0:00:28.298 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Saturday 02 November 2024  19:25:42 -0400 (0:00:00.056)       0:00:28.354 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Saturday 02 November 2024  19:25:42 -0400 (0:00:00.032)       0:00:28.387 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Saturday 02 November 2024  19:25:42 -0400 (0:00:00.030)       0:00:28.417 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Saturday 02 November 2024  19:25:42 -0400 (0:00:00.034)       0:00:28.452 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Saturday 02 November 2024  19:25:42 -0400 (0:00:00.066)       0:00:28.518 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Saturday 02 November 2024  19:25:42 -0400 (0:00:00.047)       0:00:28.566 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Saturday 02 November 2024  19:25:42 -0400 (0:00:00.033)       0:00:28.600 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Saturday 02 November 2024  19:25:42 -0400 (0:00:00.026)       0:00:28.626 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Saturday 02 November 2024  19:25:42 -0400 (0:00:00.029)       0:00:28.656 ***** 
ok: [managed-node2] => {
    "bytes": 5368709120,
    "changed": false,
    "lvm": "5g",
    "parted": "5GiB",
    "size": "5 GiB"
}

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Saturday 02 November 2024  19:25:43 -0400 (0:00:00.576)       0:00:29.232 ***** 
ok: [managed-node2] => {
    "bytes": 5368709120,
    "changed": false,
    "lvm": "5g",
    "parted": "5GiB",
    "size": "5 GiB"
}
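
Both parse tasks return the same structured size for the actual and the requested "5g" (bytes plus lvm/parted/human renderings). The test uses a helper module for this, but the core conversion can be approximated with the stock filter (a sketch; "5g" is read as binary gibibytes):

    - name: Parse the requested size of the volume (sketch)
      ansible.builtin.set_fact:
        # 5 * 1024^3 = 5368709120, matching "bytes" above
        storage_test_requested_bytes: "{{ '5g' | human_to_bytes }}"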

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Saturday 02 November 2024  19:25:43 -0400 (0:00:00.456)       0:00:29.688 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_expected_size": "5368709120"
    },
    "changed": false
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Saturday 02 November 2024  19:25:43 -0400 (0:00:00.082)       0:00:29.771 ***** 
ok: [managed-node2] => {
    "storage_test_expected_size": "5368709120"
}

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Saturday 02 November 2024  19:25:43 -0400 (0:00:00.040)       0:00:29.811 ***** 
ok: [managed-node2] => {
    "bytes": 10726680821,
    "changed": false,
    "lvm": "9g",
    "parted": "9GiB",
    "size": "9 GiB"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Saturday 02 November 2024  19:25:44 -0400 (0:00:00.459)       0:00:30.270 ***** 
skipping: [managed-node2] => {
    "false_condition": "'%' in storage_test_volume.size | string"
}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Saturday 02 November 2024  19:25:44 -0400 (0:00:00.086)       0:00:30.357 ***** 
skipping: [managed-node2] => {
    "false_condition": "'%' in storage_test_volume.size | string"
}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Saturday 02 November 2024  19:25:44 -0400 (0:00:00.152)       0:00:30.509 ***** 
skipping: [managed-node2] => {
    "false_condition": "'%' in storage_test_volume.size | string"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Saturday 02 November 2024  19:25:44 -0400 (0:00:00.063)       0:00:30.572 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "'%' in storage_test_volume.size | string",
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Saturday 02 November 2024  19:25:44 -0400 (0:00:00.094)       0:00:30.667 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Saturday 02 November 2024  19:25:44 -0400 (0:00:00.033)       0:00:30.700 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Saturday 02 November 2024  19:25:44 -0400 (0:00:00.036)       0:00:30.736 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Saturday 02 November 2024  19:25:44 -0400 (0:00:00.043)       0:00:30.780 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Saturday 02 November 2024  19:25:44 -0400 (0:00:00.034)       0:00:30.814 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Saturday 02 November 2024  19:25:44 -0400 (0:00:00.031)       0:00:30.846 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Saturday 02 November 2024  19:25:44 -0400 (0:00:00.029)       0:00:30.875 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Saturday 02 November 2024  19:25:44 -0400 (0:00:00.029)       0:00:30.905 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Saturday 02 November 2024  19:25:44 -0400 (0:00:00.027)       0:00:30.932 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Saturday 02 November 2024  19:25:44 -0400 (0:00:00.022)       0:00:30.955 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Saturday 02 November 2024  19:25:44 -0400 (0:00:00.021)       0:00:30.977 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Saturday 02 November 2024  19:25:44 -0400 (0:00:00.028)       0:00:31.006 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Saturday 02 November 2024  19:25:44 -0400 (0:00:00.028)       0:00:31.034 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Saturday 02 November 2024  19:25:44 -0400 (0:00:00.035)       0:00:31.070 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Saturday 02 November 2024  19:25:44 -0400 (0:00:00.043)       0:00:31.114 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Saturday 02 November 2024  19:25:44 -0400 (0:00:00.048)       0:00:31.163 ***** 
ok: [managed-node2] => {
    "storage_test_actual_size": {
        "bytes": 5368709120,
        "changed": false,
        "failed": false,
        "lvm": "5g",
        "parted": "5GiB",
        "size": "5 GiB"
    }
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Saturday 02 November 2024  19:25:45 -0400 (0:00:00.047)       0:00:31.211 ***** 
ok: [managed-node2] => {
    "storage_test_expected_size": "5368709120"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Saturday 02 November 2024  19:25:45 -0400 (0:00:00.045)       0:00:31.256 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Saturday 02 November 2024  19:25:45 -0400 (0:00:00.060)       0:00:31.317 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "lvs",
        "--noheadings",
        "--nameprefixes",
        "--units=b",
        "--nosuffix",
        "--unquoted",
        "-o",
        "name,attr,cache_total_blocks,chunk_size,segtype",
        "foo/test1"
    ],
    "delta": "0:00:00.028524",
    "end": "2024-11-02 19:25:45.492849",
    "rc": 0,
    "start": "2024-11-02 19:25:45.464325"
}

STDOUT:

  LVM2_LV_NAME=test1 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Saturday 02 November 2024  19:25:45 -0400 (0:00:00.424)       0:00:31.742 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_lv_segtype": [
            "linear"
        ]
    },
    "changed": false
}
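
The lvs --nameprefixes output is a single line of KEY=VALUE pairs, so the fact above is presumably filled by extracting every LVM2_SEGTYPE value from it, roughly as follows (lvs_out stands in for the register of the lvs task; the real expression may differ):

    - name: Set LV segment type (sketch)
      ansible.builtin.set_fact:
        # regex_findall with a capture group returns the captured values,
        # yielding ["linear"] for the output shown above
        storage_test_lv_segtype: "{{ lvs_out.stdout | regex_findall('LVM2_SEGTYPE=(\\S+)') }}"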

TASK [Check segment type] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Saturday 02 November 2024  19:25:45 -0400 (0:00:00.078)       0:00:31.820 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Saturday 02 November 2024  19:25:45 -0400 (0:00:00.076)       0:00:31.897 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Saturday 02 November 2024  19:25:45 -0400 (0:00:00.053)       0:00:31.950 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Saturday 02 November 2024  19:25:45 -0400 (0:00:00.043)       0:00:31.994 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Saturday 02 November 2024  19:25:45 -0400 (0:00:00.037)       0:00:32.031 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Saturday 02 November 2024  19:25:45 -0400 (0:00:00.038)       0:00:32.070 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Saturday 02 November 2024  19:25:45 -0400 (0:00:00.020)       0:00:32.091 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Saturday 02 November 2024  19:25:45 -0400 (0:00:00.017)       0:00:32.109 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Change the file system signature on the logical volume created above] ****
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_fs.yml:46
Saturday 02 November 2024  19:25:45 -0400 (0:00:00.024)       0:00:32.133 ***** 
included: fedora.linux_system_roles.storage for managed-node2

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Saturday 02 November 2024  19:25:46 -0400 (0:00:00.062)       0:00:32.196 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Saturday 02 November 2024  19:25:46 -0400 (0:00:00.050)       0:00:32.246 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Saturday 02 November 2024  19:25:46 -0400 (0:00:00.071)       0:00:32.318 ***** 
skipping: [managed-node2] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node2] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}
ok: [managed-node2] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Saturday 02 November 2024  19:25:46 -0400 (0:00:00.093)       0:00:32.411 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Saturday 02 November 2024  19:25:46 -0400 (0:00:00.037)       0:00:32.448 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Saturday 02 November 2024  19:25:46 -0400 (0:00:00.041)       0:00:32.490 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Saturday 02 November 2024  19:25:46 -0400 (0:00:00.035)       0:00:32.526 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Saturday 02 November 2024  19:25:46 -0400 (0:00:00.037)       0:00:32.564 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Saturday 02 November 2024  19:25:46 -0400 (0:00:00.091)       0:00:32.655 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Saturday 02 November 2024  19:25:46 -0400 (0:00:00.041)       0:00:32.697 ***** 
ok: [managed-node2] => {
    "storage_pools": [
        {
            "disks": [
                "sda"
            ],
            "name": "foo",
            "volumes": [
                {
                    "fs_type": "xfs",
                    "mount_point": "/opt/test1",
                    "name": "test1",
                    "size": "5g"
                }
            ]
        }
    ]
}
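
The storage_pools value printed here is the input handed to the role for this run; restated as the invoking task (paraphrased from the output above, not copied from the test source):

    - name: Change the file system signature on the logical volume created above
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            disks:
              - sda
            volumes:
              - name: test1
                size: 5g
                fs_type: xfs
                mount_point: /opt/test1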

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Saturday 02 November 2024  19:25:46 -0400 (0:00:00.084)       0:00:32.781 ***** 
ok: [managed-node2] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Saturday 02 November 2024  19:25:46 -0400 (0:00:00.078)       0:00:32.859 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Saturday 02 November 2024  19:25:46 -0400 (0:00:00.048)       0:00:32.908 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Saturday 02 November 2024  19:25:46 -0400 (0:00:00.048)       0:00:32.956 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Saturday 02 November 2024  19:25:46 -0400 (0:00:00.038)       0:00:32.995 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Saturday 02 November 2024  19:25:46 -0400 (0:00:00.037)       0:00:33.032 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Saturday 02 November 2024  19:25:46 -0400 (0:00:00.089)       0:00:33.122 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Saturday 02 November 2024  19:25:46 -0400 (0:00:00.027)       0:00:33.150 ***** 
ok: [managed-node2] => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [
        "/dev/mapper/foo-test1",
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1",
        "/dev/xvda2"
    ],
    "mounts": [
        {
            "dump": 0,
            "fstype": "xfs",
            "group": null,
            "mode": null,
            "opts": "defaults",
            "owner": null,
            "passno": 0,
            "path": "/opt/test1",
            "src": "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c",
            "state": "mounted"
        }
    ],
    "packages": [
        "lvm2",
        "xfsprogs"
    ],
    "pools": [
        {
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/foo-test1",
                    "_kernel_device": "/dev/dm-0",
                    "_mount_id": "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c",
                    "_raw_device": "/dev/mapper/foo-test1",
                    "_raw_kernel_device": "/dev/dm-0",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "5g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        }
    ],
    "volumes": []
}

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Saturday 02 November 2024  19:25:48 -0400 (0:00:01.778)       0:00:34.929 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Saturday 02 November 2024  19:25:48 -0400 (0:00:00.069)       0:00:34.998 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730589930.6483526,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "3a0ec056933b01b431c10ec4caa7278d6f7f1948",
        "ctime": 1730589930.6443527,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 541065418,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1730589930.6443527,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1436,
        "uid": 0,
        "version": "269688944",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Saturday 02 November 2024  19:25:49 -0400 (0:00:00.453)       0:00:35.451 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "blivet_output is changed",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Saturday 02 November 2024  19:25:49 -0400 (0:00:00.041)       0:00:35.493 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Saturday 02 November 2024  19:25:49 -0400 (0:00:00.037)       0:00:35.530 ***** 
ok: [managed-node2] => {
    "blivet_output": {
        "actions": [],
        "changed": false,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/mapper/foo-test1",
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1",
            "/dev/xvda2"
        ],
        "mounts": [
            {
                "dump": 0,
                "fstype": "xfs",
                "group": null,
                "mode": null,
                "opts": "defaults",
                "owner": null,
                "passno": 0,
                "path": "/opt/test1",
                "src": "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c",
                "state": "mounted"
            }
        ],
        "packages": [
            "lvm2",
            "xfsprogs"
        ],
        "pools": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/foo-test1",
                        "_kernel_device": "/dev/dm-0",
                        "_mount_id": "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c",
                        "_raw_device": "/dev/mapper/foo-test1",
                        "_raw_kernel_device": "/dev/dm-0",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "5g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ],
        "volumes": []
    }
}
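
For orientation: everything in the "pools" list above is the fully-expanded form of a much smaller storage_pools input. A minimal invocation that yields this layout would look roughly like the following (a sketch; the test's actual playbook is not reproduced here, and only non-default keys are shown):

- name: Create an LVM pool with one xfs volume on sda (sketch)
  ansible.builtin.include_role:
    name: fedora.linux_system_roles.storage
  vars:
    storage_pools:
      - name: foo
        disks:
          - sda
        volumes:
          - name: test1
            size: 5g
            fs_type: xfs
            mount_point: /opt/test1

Every other key reported above (encryption, RAID, cache, thin pool, VDO) is a role default, which is why they all come back null, false, or empty.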

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Saturday 02 November 2024  19:25:49 -0400 (0:00:00.044)       0:00:35.575 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/foo-test1",
                        "_kernel_device": "/dev/dm-0",
                        "_mount_id": "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c",
                        "_raw_device": "/dev/mapper/foo-test1",
                        "_raw_kernel_device": "/dev/dm-0",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "5g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Saturday 02 November 2024  19:25:49 -0400 (0:00:00.041)       0:00:35.616 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Saturday 02 November 2024  19:25:49 -0400 (0:00:00.035)       0:00:35.652 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Saturday 02 November 2024  19:25:49 -0400 (0:00:00.062)       0:00:35.715 ***** 
ok: [managed-node2] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Saturday 02 November 2024  19:25:50 -0400 (0:00:00.806)       0:00:36.521 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
ok: [managed-node2] => (item={'src': 'UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": false,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "xfs",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c",
        "state": "mounted"
    },
    "name": "/opt/test1",
    "opts": "defaults",
    "passno": "0",
    "src": "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c"
}
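
The ok (unchanged) status above confirms the fstab entry and the active mount already match the requested state; the role feeds each computed mount_info item to ansible.posix.mount. Done as a standalone task, the equivalent is roughly (a sketch, not the role's literal task file):

- name: Ensure the test volume is mounted (sketch)
  ansible.posix.mount:
    src: UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c
    path: /opt/test1
    fstype: xfs
    opts: defaults
    state: mounted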

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Saturday 02 November 2024  19:25:50 -0400 (0:00:00.577)       0:00:37.098 ***** 
skipping: [managed-node2] => (item={'src': 'UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None})  => {
    "ansible_loop_var": "mount_info",
    "changed": false,
    "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c",
        "state": "mounted"
    },
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped
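
Both ownership tasks above are skipped because mount_user, mount_group, and mount_mode are all null for this volume, as the false_condition shows. When any of them is set, the role applies them to the mount point; done by hand that amounts to roughly (a sketch with illustrative values):

- name: Manage mount ownership/permissions (sketch)
  ansible.builtin.file:
    path: /opt/test1
    state: directory
    owner: root    # illustrative; the role uses mount_user when set
    group: root    # illustrative; the role uses mount_group when set
    mode: '0755'   # illustrative; the role uses mount_mode when set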

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Saturday 02 November 2024  19:25:51 -0400 (0:00:00.075)       0:00:37.174 ***** 
ok: [managed-node2] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Saturday 02 November 2024  19:25:51 -0400 (0:00:00.868)       0:00:38.043 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730589811.9926362,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1730589808.2226133,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 675283161,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1730589808.2243166,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "2749971072",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
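
The stat above shows /etc/crypttab exists, is empty (size 0; the checksum is the SHA-1 of the empty string), and carries the expected 0600 root:root permissions. The standalone equivalent of this check would be (a sketch; the register name is illustrative):

- name: Retrieve facts for the /etc/crypttab file (sketch)
  ansible.builtin.stat:
    path: /etc/crypttab
    checksum_algorithm: sha1
  register: crypttab_stat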

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Saturday 02 November 2024  19:25:52 -0400 (0:00:00.399)       0:00:38.442 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Saturday 02 November 2024  19:25:52 -0400 (0:00:00.025)       0:00:38.467 ***** 
ok: [managed-node2]

TASK [Verify role results] *****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_fs.yml:59
Saturday 02 November 2024  19:25:54 -0400 (0:00:02.056)       0:00:40.524 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node2

TASK [Print out pool information] **********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Saturday 02 November 2024  19:25:54 -0400 (0:00:00.137)       0:00:40.662 ***** 
ok: [managed-node2] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/foo-test1",
                    "_kernel_device": "/dev/dm-0",
                    "_mount_id": "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c",
                    "_raw_device": "/dev/mapper/foo-test1",
                    "_raw_kernel_device": "/dev/dm-0",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "5g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        }
    ]
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Saturday 02 November 2024  19:25:54 -0400 (0:00:00.110)       0:00:40.773 ***** 
skipping: [managed-node2] => {
    "false_condition": "_storage_volumes_list | length > 0"
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Saturday 02 November 2024  19:25:54 -0400 (0:00:00.095)       0:00:40.868 ***** 
ok: [managed-node2] => {
    "changed": false,
    "info": {
        "/dev/mapper/foo-test1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/opt/test1",
            "name": "/dev/mapper/foo-test1",
            "size": "5G",
            "type": "lvm",
            "uuid": "c454eb62-a0eb-4a79-89bf-0d471375a06c"
        },
        "/dev/sda": {
            "fstype": "LVM2_member",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": "xOHC09-8dIW-nh4C-0hDX-JdU0-78e6-Sh8o2o"
        },
        "/dev/sdb": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda1",
            "size": "1M",
            "type": "partition",
            "uuid": ""
        },
        "/dev/xvda2": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda2",
            "size": "250G",
            "type": "partition",
            "uuid": "70689318-97f1-4727-890d-5d50652a95d6"
        }
    }
}
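
The device map above is the test suite's own inventory of block devices; a comparable view can be produced by hand with lsblk (a sketch; the column list and register name are illustrative):

- name: Collect info about the volumes (sketch)
  ansible.builtin.command: lsblk -p -o NAME,FSTYPE,LABEL,MOUNTPOINT,SIZE,TYPE,UUID
  register: lsblk_info
  changed_when: false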

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Saturday 02 November 2024  19:25:55 -0400 (0:00:00.488)       0:00:41.357 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:01.004659",
    "end": "2024-11-02 19:25:56.541260",
    "rc": 0,
    "start": "2024-11-02 19:25:55.536601"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Wed Oct 30 09:04:14 2024
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=70689318-97f1-4727-890d-5d50652a95d6 /                       xfs     defaults        0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c /opt/test1 xfs defaults 0 0
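
The last line of the output is the role-managed entry, flagged by the '# system_role:storage' header: device by UUID, mount point, filesystem type, mount options, then the dump and fsck-order fields (both 0). A standalone verification of its presence would be roughly (a sketch; the register name is illustrative):

- name: Read /etc/fstab (sketch)
  ansible.builtin.command: cat /etc/fstab
  register: fstab_out
  changed_when: false

- name: Assert exactly one entry for the test mount point (sketch)
  ansible.builtin.assert:
    that:
      - fstab_out.stdout_lines | select('search', '/opt/test1') | list | length == 1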

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Saturday 02 November 2024  19:25:56 -0400 (0:00:01.411)       0:00:42.768 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.003462",
    "end": "2024-11-02 19:25:56.928596",
    "failed_when_result": false,
    "rc": 0,
    "start": "2024-11-02 19:25:56.925134"
}

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Saturday 02 November 2024  19:25:56 -0400 (0:00:00.385)       0:00:43.154 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node2 => (item={'disks': ['sda'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'lvm', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '5g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': 'UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'}]})

TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Saturday 02 November 2024  19:25:57 -0400 (0:00:00.072)       0:00:43.227 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Saturday 02 November 2024  19:25:57 -0400 (0:00:00.035)       0:00:43.262 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "vgs",
        "--noheadings",
        "--binary",
        "-o",
        "shared",
        "foo"
    ],
    "delta": "0:00:00.030938",
    "end": "2024-11-02 19:25:57.463809",
    "rc": 0,
    "start": "2024-11-02 19:25:57.432871"
}

STDOUT:

        0
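
vgs --binary renders the shared attribute as 0/1, so the 0 above means the foo VG is not a shared (lvmlockd-managed) VG, matching shared: false in the pool spec. The command-plus-assert pair the test performs reduces to (a sketch; the register name is illustrative):

- name: Get VG shared value status (sketch)
  ansible.builtin.command: vgs --noheadings --binary -o shared foo
  register: vgs_shared
  changed_when: false

- name: Verify that VG shared value checks out (sketch)
  ansible.builtin.assert:
    that:
      - (vgs_shared.stdout | trim) == ('1' if storage_test_pool.shared else '0')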

TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Saturday 02 November 2024  19:25:57 -0400 (0:00:00.442)       0:00:43.705 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify pool subset] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34
Saturday 02 November 2024  19:25:57 -0400 (0:00:00.046)       0:00:43.752 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node2 => (item=members)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node2 => (item=volumes)

TASK [Set test variables] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2
Saturday 02 November 2024  19:25:57 -0400 (0:00:00.091)       0:00:43.844 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_pv_count": "1",
        "_storage_test_pool_pvs_lvm": [
            "/dev/sda"
        ]
    },
    "changed": false
}

TASK [Get the canonical device path for each member device] ********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8
Saturday 02 November 2024  19:25:57 -0400 (0:00:00.071)       0:00:43.915 ***** 
ok: [managed-node2] => (item=/dev/sda) => {
    "ansible_loop_var": "pv",
    "changed": false,
    "device": "/dev/sda",
    "pv": "/dev/sda"
}

TASK [Set pvs lvm length] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17
Saturday 02 November 2024  19:25:58 -0400 (0:00:00.424)       0:00:44.340 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "__pvs_lvm_len": "1"
    },
    "changed": false
}

TASK [Set pool pvs] ************************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22
Saturday 02 November 2024  19:25:58 -0400 (0:00:00.085)       0:00:44.426 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_pool_pvs": [
            "/dev/sda"
        ]
    },
    "changed": false
}

TASK [Verify PV count] *********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27
Saturday 02 November 2024  19:25:58 -0400 (0:00:00.073)       0:00:44.500 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed
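
The assertion compares the number of PVs actually backing the VG with the number of disks requested for the pool; both facts were set just above. In isolation it reduces to (a sketch built from the fact names visible in this log):

- name: Verify PV count (sketch)
  ansible.builtin.assert:
    that:
      - __pvs_lvm_len | int == _storage_test_expected_pv_count | int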

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36
Saturday 02 November 2024  19:25:58 -0400 (0:00:00.056)       0:00:44.556 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_pv_type": "disk"
    },
    "changed": false
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41
Saturday 02 November 2024  19:25:58 -0400 (0:00:00.034)       0:00:44.591 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_pv_type": "disk"
    },
    "changed": false
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46
Saturday 02 November 2024  19:25:58 -0400 (0:00:00.067)       0:00:44.658 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level",
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:51
Saturday 02 November 2024  19:25:58 -0400 (0:00:00.037)       0:00:44.695 ***** 
ok: [managed-node2] => (item=/dev/sda) => {
    "ansible_loop_var": "pv",
    "changed": false,
    "pv": "/dev/sda"
}

MSG:

All assertions passed

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:64
Saturday 02 November 2024  19:25:58 -0400 (0:00:00.044)       0:00:44.740 ***** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0
}

STDOUT:

True

TASK [Verify that PVs fill their whole devices when they should] **************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:73
Saturday 02 November 2024  19:25:58 -0400 (0:00:00.418)       0:00:45.159 ***** 
skipping: [managed-node2] => (item=/dev/sda)  => {
    "ansible_loop_var": "st_pool_pv",
    "changed": false,
    "false_condition": "storage_test_pool.grow_to_fill | bool",
    "skip_reason": "Conditional result was False",
    "st_pool_pv": "/dev/sda"
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:83
Saturday 02 November 2024  19:25:59 -0400 (0:00:00.063)       0:00:45.222 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node2

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Saturday 02 November 2024  19:25:59 -0400 (0:00:00.050)       0:00:45.272 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Saturday 02 November 2024  19:25:59 -0400 (0:00:00.028)       0:00:45.300 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Saturday 02 November 2024  19:25:59 -0400 (0:00:00.024)       0:00:45.325 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Saturday 02 November 2024  19:25:59 -0400 (0:00:00.022)       0:00:45.347 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Saturday 02 November 2024  19:25:59 -0400 (0:00:00.020)       0:00:45.368 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Saturday 02 November 2024  19:25:59 -0400 (0:00:00.028)       0:00:45.397 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Saturday 02 November 2024  19:25:59 -0400 (0:00:00.029)       0:00:45.427 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55
Saturday 02 November 2024  19:25:59 -0400 (0:00:00.021)       0:00:45.448 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64
Saturday 02 November 2024  19:25:59 -0400 (0:00:00.020)       0:00:45.468 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74
Saturday 02 November 2024  19:25:59 -0400 (0:00:00.022)       0:00:45.491 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83
Saturday 02 November 2024  19:25:59 -0400 (0:00:00.021)       0:00:45.513 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}

TASK [Check LVM RAID] **********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:86
Saturday 02 November 2024  19:25:59 -0400 (0:00:00.026)       0:00:45.539 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node2

TASK [Validate pool member LVM RAID settings] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Saturday 02 November 2024  19:25:59 -0400 (0:00:00.061)       0:00:45.600 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '5g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': 'UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'})

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:8
Saturday 02 November 2024  19:25:59 -0400 (0:00:00.059)       0:00:45.660 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:16
Saturday 02 November 2024  19:25:59 -0400 (0:00:00.045)       0:00:45.705 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:20
Saturday 02 November 2024  19:25:59 -0400 (0:00:00.051)       0:00:45.757 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV stripe size] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:27
Saturday 02 November 2024  19:25:59 -0400 (0:00:00.044)       0:00:45.801 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested stripe size] *****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:31
Saturday 02 November 2024  19:25:59 -0400 (0:00:00.038)       0:00:45.840 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected stripe size] ************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:37
Saturday 02 November 2024  19:25:59 -0400 (0:00:00.031)       0:00:45.871 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Check stripe size] *******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:42
Saturday 02 November 2024  19:25:59 -0400 (0:00:00.031)       0:00:45.903 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Check Thin Pools] ********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:89
Saturday 02 November 2024  19:25:59 -0400 (0:00:00.032)       0:00:45.935 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node2

TASK [Validate pool member thinpool settings] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2
Saturday 02 November 2024  19:25:59 -0400 (0:00:00.049)       0:00:45.985 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '5g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': 'UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'})

TASK [Get information about thinpool] ******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:8
Saturday 02 November 2024  19:25:59 -0400 (0:00:00.090)       0:00:46.075 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_thin_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Check that volume is in correct thinpool (when thinp name is provided)] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:16
Saturday 02 November 2024  19:25:59 -0400 (0:00:00.031)       0:00:46.107 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_thin_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Check that volume is in thinpool (when thinp name is not provided)] ******
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:22
Saturday 02 November 2024  19:25:59 -0400 (0:00:00.031)       0:00:46.139 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_thin_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:26
Saturday 02 November 2024  19:26:00 -0400 (0:00:00.030)       0:00:46.170 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_thin_status": null
    },
    "changed": false
}

TASK [Check member encryption] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:92
Saturday 02 November 2024  19:26:00 -0400 (0:00:00.045)       0:00:46.215 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node2

TASK [Set test variables] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Saturday 02 November 2024  19:26:00 -0400 (0:00:00.104)       0:00:46.320 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Saturday 02 November 2024  19:26:00 -0400 (0:00:00.111)       0:00:46.431 ***** 
skipping: [managed-node2] => (item=/dev/sda)  => {
    "_storage_test_pool_member_path": "/dev/sda",
    "ansible_loop_var": "_storage_test_pool_member_path",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Saturday 02 November 2024  19:26:00 -0400 (0:00:00.068)       0:00:46.499 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml for managed-node2 => (item=/dev/sda)

TASK [Set variables used by tests] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:2
Saturday 02 November 2024  19:26:00 -0400 (0:00:00.080)       0:00:46.579 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": []
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:6
Saturday 02 November 2024  19:26:00 -0400 (0:00:00.081)       0:00:46.661 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed
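
With encryption disabled on the pool, the expected crypttab entry count is 0 and the filtered entry list set above is empty, so the assertion passes. The check reduces to (a sketch using the fact names from this log):

- name: Check for /etc/crypttab entry (sketch)
  ansible.builtin.assert:
    that:
      - _storage_test_crypttab_entries | length == _storage_test_expected_crypttab_entries | int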

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:14
Saturday 02 November 2024  19:26:00 -0400 (0:00:00.073)       0:00:46.734 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:23
Saturday 02 November 2024  19:26:00 -0400 (0:00:00.059)       0:00:46.794 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "false and _storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:32
Saturday 02 November 2024  19:26:00 -0400 (0:00:00.054)       0:00:46.848 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:41
Saturday 02 November 2024  19:26:00 -0400 (0:00:00.064)       0:00:46.913 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null
    },
    "changed": false
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Saturday 02 November 2024  19:26:00 -0400 (0:00:00.036)       0:00:46.949 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:95
Saturday 02 November 2024  19:26:00 -0400 (0:00:00.032)       0:00:46.982 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node2

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Saturday 02 November 2024  19:26:00 -0400 (0:00:00.072)       0:00:47.055 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '5g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': 'UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'})

TASK [Get information about VDO deduplication] *********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:8
Saturday 02 November 2024  19:26:00 -0400 (0:00:00.065)       0:00:47.121 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO deduplication is off] ***************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:15
Saturday 02 November 2024  19:26:00 -0400 (0:00:00.034)       0:00:47.155 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO deduplication is on] ****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:21
Saturday 02 November 2024  19:26:01 -0400 (0:00:00.034)       0:00:47.189 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about VDO compression] ***********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:27
Saturday 02 November 2024  19:26:01 -0400 (0:00:00.032)       0:00:47.221 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO compression is off] ****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:34
Saturday 02 November 2024  19:26:01 -0400 (0:00:00.031)       0:00:47.253 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO compression is on] *****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:40
Saturday 02 November 2024  19:26:01 -0400 (0:00:00.043)       0:00:47.296 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:46
Saturday 02 November 2024  19:26:01 -0400 (0:00:00.049)       0:00:47.345 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_vdo_status": null
    },
    "changed": false
}

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:98
Saturday 02 November 2024  19:26:01 -0400 (0:00:00.052)       0:00:47.398 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node2

TASK [Run 'stratis report'] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Saturday 02 November 2024  19:26:01 -0400 (0:00:00.101)       0:00:47.499 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:11
Saturday 02 November 2024  19:26:01 -0400 (0:00:00.040)       0:00:47.539 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the pool was created] ***************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Saturday 02 November 2024  19:26:01 -0400 (0:00:00.061)       0:00:47.601 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:25
Saturday 02 November 2024  19:26:01 -0400 (0:00:00.033)       0:00:47.634 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:34
Saturday 02 November 2024  19:26:01 -0400 (0:00:00.032)       0:00:47.666 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:44
Saturday 02 November 2024  19:26:01 -0400 (0:00:00.031)       0:00:47.698 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:101
Saturday 02 November 2024  19:26:01 -0400 (0:00:00.034)       0:00:47.732 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Saturday 02 November 2024  19:26:01 -0400 (0:00:00.040)       0:00:47.773 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '5g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': 'UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'})

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Saturday 02 November 2024  19:26:01 -0400 (0:00:00.082)       0:00:47.856 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_volume_present": true,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Saturday 02 November 2024  19:26:01 -0400 (0:00:00.095)       0:00:47.952 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node2 => (item=mount)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node2 => (item=fstab)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node2 => (item=fs)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node2 => (item=device)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node2 => (item=encryption)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node2 => (item=md)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node2 => (item=size)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node2 => (item=cache)
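
The eight includes above are driven by the _storage_volume_tests list set in the previous task. A minimal sketch of the dispatch loop, with the loop variable name taken from the task banner and everything else assumed:

    # Assumed shape of the per-subset dispatch; illustrative, not the exact source
    - name: Run test verify for {{ storage_test_volume_subset }}
      ansible.builtin.include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
      loop: "{{ _storage_volume_tests }}"
      loop_control:
        loop_var: storage_test_volume_subset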

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Saturday 02 November 2024  19:26:01 -0400 (0:00:00.197)       0:00:48.149 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/mapper/foo-test1"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Saturday 02 November 2024  19:26:02 -0400 (0:00:00.048)       0:00:48.198 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "/opt/test1",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Saturday 02 November 2024  19:26:02 -0400 (0:00:00.079)       0:00:48.278 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Saturday 02 November 2024  19:26:02 -0400 (0:00:00.026)       0:00:48.304 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Saturday 02 November 2024  19:26:02 -0400 (0:00:00.030)       0:00:48.335 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Saturday 02 November 2024  19:26:02 -0400 (0:00:00.025)       0:00:48.360 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Saturday 02 November 2024  19:26:02 -0400 (0:00:00.022)       0:00:48.383 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode",
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Saturday 02 November 2024  19:26:02 -0400 (0:00:00.023)       0:00:48.407 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Saturday 02 November 2024  19:26:02 -0400 (0:00:00.025)       0:00:48.432 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Saturday 02 November 2024  19:26:02 -0400 (0:00:00.020)       0:00:48.453 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Saturday 02 November 2024  19:26:02 -0400 (0:00:00.021)       0:00:48.475 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Saturday 02 November 2024  19:26:02 -0400 (0:00:00.023)       0:00:48.498 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "1",
        "storage_test_fstab_expected_mount_options_matches": "1",
        "storage_test_fstab_expected_mount_point_matches": "1",
        "storage_test_fstab_id_matches": [
            "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c "
        ],
        "storage_test_fstab_mount_options_matches": [
            " /opt/test1 xfs defaults "
        ],
        "storage_test_fstab_mount_point_matches": [
            " /opt/test1 "
        ]
    },
    "changed": false
}
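
The facts above hold the /etc/fstab lines that matched the volume's UUID, mount point, and mount options, alongside the expected match counts. A sketch of how the next task plausibly consumes them (the comparison shape is an assumption):

    # Hypothetical form of the assertion that follows in the log
    - name: Verify that the device identifier appears in /etc/fstab
      ansible.builtin.assert:
        that:
          - storage_test_fstab_id_matches | length == storage_test_fstab_expected_id_matches | int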

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Saturday 02 November 2024  19:26:02 -0400 (0:00:00.065)       0:00:48.564 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Saturday 02 November 2024  19:26:02 -0400 (0:00:00.055)       0:00:48.620 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Saturday 02 November 2024  19:26:02 -0400 (0:00:00.122)       0:00:48.743 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__storage_verify_mount_options | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Saturday 02 November 2024  19:26:02 -0400 (0:00:00.179)       0:00:48.922 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:51
Saturday 02 November 2024  19:26:02 -0400 (0:00:00.072)       0:00:48.994 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Saturday 02 November 2024  19:26:02 -0400 (0:00:00.068)       0:00:49.062 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Saturday 02 November 2024  19:26:03 -0400 (0:00:00.135)       0:00:49.198 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Saturday 02 November 2024  19:26:03 -0400 (0:00:00.075)       0:00:49.273 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730589928.016337,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "ctime": 1730589928.016337,
        "dev": 6,
        "device_type": 64768,
        "executable": false,
        "exists": true,
        "gid": 6,
        "gr_name": "disk",
        "inode": 5141,
        "isblk": true,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": false,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/symlink",
        "mode": "0660",
        "mtime": 1730589928.016337,
        "nlink": 1,
        "path": "/dev/mapper/foo-test1",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": null,
        "wgrp": true,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
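
The stat result reports isblk=true for a /dev/mapper path, which is itself a symlink to /dev/dm-0, so the probe evidently followed the link to the underlying block device. A minimal sketch, with follow=true as an inferred assumption:

    # Sketch of the probe; follow=true is inferred from isblk=true above
    - name: See whether the device node is present
      ansible.builtin.stat:
        path: /dev/mapper/foo-test1
        follow: true
      register: storage_test_dev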

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Saturday 02 November 2024  19:26:03 -0400 (0:00:00.470)       0:00:49.744 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Saturday 02 November 2024  19:26:03 -0400 (0:00:00.059)       0:00:49.803 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Saturday 02 November 2024  19:26:03 -0400 (0:00:00.046)       0:00:49.850 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Saturday 02 November 2024  19:26:03 -0400 (0:00:00.048)       0:00:49.899 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "st_volume_type": "lvm"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Saturday 02 November 2024  19:26:03 -0400 (0:00:00.048)       0:00:49.947 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"raid\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Saturday 02 November 2024  19:26:03 -0400 (0:00:00.034)       0:00:49.981 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Saturday 02 November 2024  19:26:03 -0400 (0:00:00.043)       0:00:50.025 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Saturday 02 November 2024  19:26:03 -0400 (0:00:00.034)       0:00:50.060 ***** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: cryptsetup

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Saturday 02 November 2024  19:26:04 -0400 (0:00:00.791)       0:00:50.852 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Saturday 02 November 2024  19:26:04 -0400 (0:00:00.039)       0:00:50.892 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Saturday 02 November 2024  19:26:04 -0400 (0:00:00.036)       0:00:50.929 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Saturday 02 November 2024  19:26:04 -0400 (0:00:00.082)       0:00:51.011 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Saturday 02 November 2024  19:26:04 -0400 (0:00:00.030)       0:00:51.042 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Saturday 02 November 2024  19:26:04 -0400 (0:00:00.033)       0:00:51.075 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Saturday 02 November 2024  19:26:04 -0400 (0:00:00.030)       0:00:51.106 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Saturday 02 November 2024  19:26:04 -0400 (0:00:00.028)       0:00:51.135 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Saturday 02 November 2024  19:26:05 -0400 (0:00:00.030)       0:00:51.166 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}
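
With encryption disabled, the expected crypttab entry count is "0" and the match list is empty. The next task plausibly compares the two along these lines (shape assumed):

    # Assumed shape of the crypttab assertion that follows
    - name: Check for /etc/crypttab entry
      ansible.builtin.assert:
        that:
          - _storage_test_crypttab_entries | length == _storage_test_expected_crypttab_entries | int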

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Saturday 02 November 2024  19:26:05 -0400 (0:00:00.071)       0:00:51.237 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Saturday 02 November 2024  19:26:05 -0400 (0:00:00.101)       0:00:51.338 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Saturday 02 November 2024  19:26:05 -0400 (0:00:00.088)       0:00:51.427 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Saturday 02 November 2024  19:26:05 -0400 (0:00:00.059)       0:00:51.486 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Saturday 02 November 2024  19:26:05 -0400 (0:00:00.073)       0:00:51.560 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Saturday 02 November 2024  19:26:05 -0400 (0:00:00.046)       0:00:51.607 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Saturday 02 November 2024  19:26:05 -0400 (0:00:00.046)       0:00:51.653 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Saturday 02 November 2024  19:26:05 -0400 (0:00:00.029)       0:00:51.683 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Saturday 02 November 2024  19:26:05 -0400 (0:00:00.041)       0:00:51.725 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Saturday 02 November 2024  19:26:05 -0400 (0:00:00.035)       0:00:51.760 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Saturday 02 November 2024  19:26:05 -0400 (0:00:00.052)       0:00:51.813 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Saturday 02 November 2024  19:26:05 -0400 (0:00:00.051)       0:00:51.865 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Saturday 02 November 2024  19:26:05 -0400 (0:00:00.027)       0:00:51.892 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Saturday 02 November 2024  19:26:05 -0400 (0:00:00.030)       0:00:51.923 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Saturday 02 November 2024  19:26:05 -0400 (0:00:00.031)       0:00:51.954 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Saturday 02 November 2024  19:26:05 -0400 (0:00:00.031)       0:00:51.985 ***** 
ok: [managed-node2] => {
    "bytes": 5368709120,
    "changed": false,
    "lvm": "5g",
    "parted": "5GiB",
    "size": "5 GiB"
}
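
The bytes value is the binary (GiB) reading of the "5g" size: 5 * 1024^3 = 5368709120. Purely for illustration:

    # 5g is treated as 5 GiB, not 5 * 10^9 bytes
    - name: Show the binary interpretation of 5g (illustrative arithmetic)
      ansible.builtin.debug:
        msg: "{{ 5 * 1024 * 1024 * 1024 }}"  # -> 5368709120, matching bytes above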

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Saturday 02 November 2024  19:26:06 -0400 (0:00:00.491)       0:00:52.476 ***** 
ok: [managed-node2] => {
    "bytes": 5368709120,
    "changed": false,
    "lvm": "5g",
    "parted": "5GiB",
    "size": "5 GiB"
}

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Saturday 02 November 2024  19:26:06 -0400 (0:00:00.492)       0:00:52.969 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_expected_size": "5368709120"
    },
    "changed": false
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Saturday 02 November 2024  19:26:06 -0400 (0:00:00.085)       0:00:53.054 ***** 
ok: [managed-node2] => {
    "storage_test_expected_size": "5368709120"
}

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Saturday 02 November 2024  19:26:06 -0400 (0:00:00.061)       0:00:53.116 ***** 
ok: [managed-node2] => {
    "bytes": 10726680821,
    "changed": false,
    "lvm": "9g",
    "parted": "9GiB",
    "size": "9 GiB"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Saturday 02 November 2024  19:26:07 -0400 (0:00:00.492)       0:00:53.608 ***** 
skipping: [managed-node2] => {
    "false_condition": "'%' in storage_test_volume.size | string"
}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Saturday 02 November 2024  19:26:07 -0400 (0:00:00.063)       0:00:53.672 ***** 
skipping: [managed-node2] => {
    "false_condition": "'%' in storage_test_volume.size | string"
}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Saturday 02 November 2024  19:26:07 -0400 (0:00:00.067)       0:00:53.740 ***** 
skipping: [managed-node2] => {
    "false_condition": "'%' in storage_test_volume.size | string"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Saturday 02 November 2024  19:26:07 -0400 (0:00:00.083)       0:00:53.824 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "'%' in storage_test_volume.size | string",
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Saturday 02 November 2024  19:26:07 -0400 (0:00:00.086)       0:00:53.910 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Saturday 02 November 2024  19:26:07 -0400 (0:00:00.051)       0:00:53.961 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Saturday 02 November 2024  19:26:07 -0400 (0:00:00.058)       0:00:54.020 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Saturday 02 November 2024  19:26:07 -0400 (0:00:00.037)       0:00:54.058 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Saturday 02 November 2024  19:26:07 -0400 (0:00:00.037)       0:00:54.095 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Saturday 02 November 2024  19:26:07 -0400 (0:00:00.036)       0:00:54.131 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Saturday 02 November 2024  19:26:08 -0400 (0:00:00.038)       0:00:54.170 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Saturday 02 November 2024  19:26:08 -0400 (0:00:00.034)       0:00:54.205 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Saturday 02 November 2024  19:26:08 -0400 (0:00:00.036)       0:00:54.242 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Saturday 02 November 2024  19:26:08 -0400 (0:00:00.032)       0:00:54.274 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Saturday 02 November 2024  19:26:08 -0400 (0:00:00.034)       0:00:54.308 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Saturday 02 November 2024  19:26:08 -0400 (0:00:00.033)       0:00:54.341 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Saturday 02 November 2024  19:26:08 -0400 (0:00:00.033)       0:00:54.375 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Saturday 02 November 2024  19:26:08 -0400 (0:00:00.032)       0:00:54.408 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Saturday 02 November 2024  19:26:08 -0400 (0:00:00.036)       0:00:54.444 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Saturday 02 November 2024  19:26:08 -0400 (0:00:00.032)       0:00:54.477 ***** 
ok: [managed-node2] => {
    "storage_test_actual_size": {
        "bytes": 5368709120,
        "changed": false,
        "failed": false,
        "lvm": "5g",
        "parted": "5GiB",
        "size": "5 GiB"
    }
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Saturday 02 November 2024  19:26:08 -0400 (0:00:00.040)       0:00:54.518 ***** 
ok: [managed-node2] => {
    "storage_test_expected_size": "5368709120"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Saturday 02 November 2024  19:26:08 -0400 (0:00:00.039)       0:00:54.557 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Saturday 02 November 2024  19:26:08 -0400 (0:00:00.075)       0:00:54.632 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "lvs",
        "--noheadings",
        "--nameprefixes",
        "--units=b",
        "--nosuffix",
        "--unquoted",
        "-o",
        "name,attr,cache_total_blocks,chunk_size,segtype",
        "foo/test1"
    ],
    "delta": "0:00:00.037133",
    "end": "2024-11-02 19:26:08.870847",
    "rc": 0,
    "start": "2024-11-02 19:26:08.833714"
}

STDOUT:

  LVM2_LV_NAME=test1 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear
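
With --nameprefixes, lvs emits KEY=value pairs on a single line, which the next task mines for the segment type. One way to extract it (the register name and regex are illustrative, not necessarily the test's exact expression):

    # 'lvs_out' is an assumed register for the lvs command above
    - name: Set LV segment type
      ansible.builtin.set_fact:
        storage_test_lv_segtype: "{{ lvs_out.stdout | regex_findall('LVM2_SEGTYPE=(\\S+)') }}"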

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Saturday 02 November 2024  19:26:08 -0400 (0:00:00.480)       0:00:55.113 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_lv_segtype": [
            "linear"
        ]
    },
    "changed": false
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Saturday 02 November 2024  19:26:09 -0400 (0:00:00.169)       0:00:55.282 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Saturday 02 November 2024  19:26:09 -0400 (0:00:00.117)       0:00:55.399 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Saturday 02 November 2024  19:26:09 -0400 (0:00:00.070)       0:00:55.470 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Saturday 02 November 2024  19:26:09 -0400 (0:00:00.063)       0:00:55.533 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Saturday 02 November 2024  19:26:09 -0400 (0:00:00.076)       0:00:55.610 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Saturday 02 November 2024  19:26:09 -0400 (0:00:00.120)       0:00:55.730 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Saturday 02 November 2024  19:26:09 -0400 (0:00:00.061)       0:00:55.792 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Saturday 02 November 2024  19:26:09 -0400 (0:00:00.049)       0:00:55.842 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Re-run the role on the same volume without specifying fs_type] ***********
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_fs.yml:62
Saturday 02 November 2024  19:26:09 -0400 (0:00:00.047)       0:00:55.889 ***** 
included: fedora.linux_system_roles.storage for managed-node2

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Saturday 02 November 2024  19:26:09 -0400 (0:00:00.130)       0:00:56.020 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Saturday 02 November 2024  19:26:09 -0400 (0:00:00.109)       0:00:56.129 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Saturday 02 November 2024  19:26:10 -0400 (0:00:00.123)       0:00:56.253 ***** 
skipping: [managed-node2] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node2] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}
ok: [managed-node2] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}
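
Note that the last entry of blivet_package_list is stored as a literal Jinja expression; it resolves only when the list is consumed, selecting the s390x-specific package per host. For illustration:

    # The inline conditional resolves at consumption time
    - name: Resolve the architecture-dependent package name (illustrative)
      ansible.builtin.debug:
        msg: "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"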

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Saturday 02 November 2024  19:26:10 -0400 (0:00:00.112)       0:00:56.365 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Saturday 02 November 2024  19:26:10 -0400 (0:00:00.060)       0:00:56.426 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Saturday 02 November 2024  19:26:10 -0400 (0:00:00.059)       0:00:56.487 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Saturday 02 November 2024  19:26:10 -0400 (0:00:00.036)       0:00:56.524 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Saturday 02 November 2024  19:26:10 -0400 (0:00:00.033)       0:00:56.557 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Saturday 02 November 2024  19:26:10 -0400 (0:00:00.085)       0:00:56.643 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Saturday 02 November 2024  19:26:10 -0400 (0:00:00.042)       0:00:56.686 ***** 
ok: [managed-node2] => {
    "storage_pools": [
        {
            "disks": [
                "sda"
            ],
            "name": "foo",
            "volumes": [
                {
                    "mount_point": "/opt/test1",
                    "name": "test1",
                    "size": "5g"
                }
            ]
        }
    ]
}
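
The structure echoed above is the role's primary input. A minimal playbook producing the same request (matching this test's re-run, which omits fs_type; the host and task layout are assumptions):

    # Sketch of an invocation that yields the storage_pools shown above
    - hosts: managed-node2
      tasks:
        - name: Run the role
          ansible.builtin.include_role:
            name: fedora.linux_system_roles.storage
          vars:
            storage_pools:
              - name: foo
                disks: [sda]
                volumes:
                  - name: test1
                    size: 5g
                    mount_point: /opt/test1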

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Saturday 02 November 2024  19:26:10 -0400 (0:00:00.067)       0:00:56.754 ***** 
ok: [managed-node2] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Saturday 02 November 2024  19:26:10 -0400 (0:00:00.073)       0:00:56.827 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Saturday 02 November 2024  19:26:10 -0400 (0:00:00.079)       0:00:56.906 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Saturday 02 November 2024  19:26:10 -0400 (0:00:00.040)       0:00:56.947 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Saturday 02 November 2024  19:26:10 -0400 (0:00:00.042)       0:00:56.989 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Saturday 02 November 2024  19:26:10 -0400 (0:00:00.047)       0:00:57.037 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Saturday 02 November 2024  19:26:10 -0400 (0:00:00.108)       0:00:57.145 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Saturday 02 November 2024  19:26:11 -0400 (0:00:00.065)       0:00:57.211 ***** 
ok: [managed-node2] => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [
        "/dev/mapper/foo-test1",
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1",
        "/dev/xvda2"
    ],
    "mounts": [
        {
            "dump": 0,
            "fstype": "xfs",
            "group": null,
            "mode": null,
            "opts": "defaults",
            "owner": null,
            "passno": 0,
            "path": "/opt/test1",
            "src": "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c",
            "state": "mounted"
        }
    ],
    "packages": [
        "lvm2",
        "xfsprogs"
    ],
    "pools": [
        {
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/foo-test1",
                    "_kernel_device": "/dev/dm-0",
                    "_mount_id": "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c",
                    "_raw_device": "/dev/mapper/foo-test1",
                    "_raw_kernel_device": "/dev/dm-0",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "5g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        }
    ],
    "volumes": []
}

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Saturday 02 November 2024  19:26:12 -0400 (0:00:01.737)       0:00:58.949 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Saturday 02 November 2024  19:26:12 -0400 (0:00:00.071)       0:00:59.021 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730589930.6483526,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "3a0ec056933b01b431c10ec4caa7278d6f7f1948",
        "ctime": 1730589930.6443527,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 541065418,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1730589930.6443527,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1436,
        "uid": 0,
        "version": "269688944",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Saturday 02 November 2024  19:26:13 -0400 (0:00:00.432)       0:00:59.453 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "blivet_output is changed",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Saturday 02 November 2024  19:26:13 -0400 (0:00:00.042)       0:00:59.495 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Saturday 02 November 2024  19:26:13 -0400 (0:00:00.031)       0:00:59.527 ***** 
ok: [managed-node2] => {
    "blivet_output": {
        "actions": [],
        "changed": false,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/mapper/foo-test1",
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1",
            "/dev/xvda2"
        ],
        "mounts": [
            {
                "dump": 0,
                "fstype": "xfs",
                "group": null,
                "mode": null,
                "opts": "defaults",
                "owner": null,
                "passno": 0,
                "path": "/opt/test1",
                "src": "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c",
                "state": "mounted"
            }
        ],
        "packages": [
            "lvm2",
            "xfsprogs"
        ],
        "pools": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/foo-test1",
                        "_kernel_device": "/dev/dm-0",
                        "_mount_id": "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c",
                        "_raw_device": "/dev/mapper/foo-test1",
                        "_raw_kernel_device": "/dev/dm-0",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "5g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Saturday 02 November 2024  19:26:13 -0400 (0:00:00.045)       0:00:59.573 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/foo-test1",
                        "_kernel_device": "/dev/dm-0",
                        "_mount_id": "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c",
                        "_raw_device": "/dev/mapper/foo-test1",
                        "_raw_kernel_device": "/dev/dm-0",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "5g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Saturday 02 November 2024  19:26:13 -0400 (0:00:00.038)       0:00:59.612 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Saturday 02 November 2024  19:26:13 -0400 (0:00:00.031)       0:00:59.643 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Saturday 02 November 2024  19:26:13 -0400 (0:00:00.048)       0:00:59.692 ***** 
ok: [managed-node2] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Saturday 02 November 2024  19:26:14 -0400 (0:00:00.778)       0:01:00.470 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
ok: [managed-node2] => (item={'src': 'UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": false,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "xfs",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c",
        "state": "mounted"
    },
    "name": "/opt/test1",
    "opts": "defaults",
    "passno": "0",
    "src": "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c"
}

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Saturday 02 November 2024  19:26:14 -0400 (0:00:00.465)       0:01:00.936 ***** 
skipping: [managed-node2] => (item={'src': 'UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None})  => {
    "ansible_loop_var": "mount_info",
    "changed": false,
    "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c",
        "state": "mounted"
    },
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Saturday 02 November 2024  19:26:14 -0400 (0:00:00.137)       0:01:01.073 ***** 
ok: [managed-node2] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Saturday 02 November 2024  19:26:15 -0400 (0:00:00.828)       0:01:01.902 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730589811.9926362,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1730589808.2226133,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 675283161,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1730589808.2243166,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "2749971072",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Saturday 02 November 2024  19:26:16 -0400 (0:00:00.434)       0:01:02.337 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Saturday 02 November 2024  19:26:16 -0400 (0:00:00.029)       0:01:02.366 ***** 
ok: [managed-node2]

TASK [Verify the output of the duplicate volumes test] *************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_fs.yml:74
Saturday 02 November 2024  19:26:17 -0400 (0:00:00.985)       0:01:03.352 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify role results] *****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_fs.yml:81
Saturday 02 November 2024  19:26:17 -0400 (0:00:00.048)       0:01:03.400 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node2

TASK [Print out pool information] **********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Saturday 02 November 2024  19:26:17 -0400 (0:00:00.055)       0:01:03.456 ***** 
ok: [managed-node2] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/foo-test1",
                    "_kernel_device": "/dev/dm-0",
                    "_mount_id": "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c",
                    "_raw_device": "/dev/mapper/foo-test1",
                    "_raw_kernel_device": "/dev/dm-0",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "5g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        }
    ]
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Saturday 02 November 2024  19:26:17 -0400 (0:00:00.080)       0:01:03.537 ***** 
skipping: [managed-node2] => {
    "false_condition": "_storage_volumes_list | length > 0"
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Saturday 02 November 2024  19:26:17 -0400 (0:00:00.069)       0:01:03.606 ***** 
ok: [managed-node2] => {
    "changed": false,
    "info": {
        "/dev/mapper/foo-test1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/opt/test1",
            "name": "/dev/mapper/foo-test1",
            "size": "5G",
            "type": "lvm",
            "uuid": "c454eb62-a0eb-4a79-89bf-0d471375a06c"
        },
        "/dev/sda": {
            "fstype": "LVM2_member",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": "xOHC09-8dIW-nh4C-0hDX-JdU0-78e6-Sh8o2o"
        },
        "/dev/sdb": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda1",
            "size": "1M",
            "type": "partition",
            "uuid": ""
        },
        "/dev/xvda2": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda2",
            "size": "250G",
            "type": "partition",
            "uuid": "70689318-97f1-4727-890d-5d50652a95d6"
        }
    }
}

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Saturday 02 November 2024  19:26:17 -0400 (0:00:00.395)       0:01:04.001 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:01.004747",
    "end": "2024-11-02 19:26:19.157699",
    "rc": 0,
    "start": "2024-11-02 19:26:18.152952"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Wed Oct 30 09:04:14 2024
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=70689318-97f1-4727-890d-5d50652a95d6 /                       xfs     defaults        0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c /opt/test1 xfs defaults 0 0

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Saturday 02 November 2024  19:26:19 -0400 (0:00:01.386)       0:01:05.388 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.003362",
    "end": "2024-11-02 19:26:19.543494",
    "failed_when_result": false,
    "rc": 0,
    "start": "2024-11-02 19:26:19.540132"
}

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Saturday 02 November 2024  19:26:19 -0400 (0:00:00.418)       0:01:05.807 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node2 => (item={'disks': ['sda'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'lvm', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '5g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': 'UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'}]})

TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Saturday 02 November 2024  19:26:19 -0400 (0:00:00.150)       0:01:05.957 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Saturday 02 November 2024  19:26:19 -0400 (0:00:00.034)       0:01:05.992 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "vgs",
        "--noheadings",
        "--binary",
        "-o",
        "shared",
        "foo"
    ],
    "delta": "0:00:00.024267",
    "end": "2024-11-02 19:26:20.178827",
    "rc": 0,
    "start": "2024-11-02 19:26:20.154560"
}

STDOUT:

        0

TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Saturday 02 November 2024  19:26:20 -0400 (0:00:00.447)       0:01:06.440 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify pool subset] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34
Saturday 02 November 2024  19:26:20 -0400 (0:00:00.031)       0:01:06.471 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node2 => (item=members)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node2 => (item=volumes)

TASK [Set test variables] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2
Saturday 02 November 2024  19:26:20 -0400 (0:00:00.049)       0:01:06.520 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_pv_count": "1",
        "_storage_test_pool_pvs_lvm": [
            "/dev/sda"
        ]
    },
    "changed": false
}

TASK [Get the canonical device path for each member device] ********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8
Saturday 02 November 2024  19:26:20 -0400 (0:00:00.057)       0:01:06.577 ***** 
ok: [managed-node2] => (item=/dev/sda) => {
    "ansible_loop_var": "pv",
    "changed": false,
    "device": "/dev/sda",
    "pv": "/dev/sda"
}

TASK [Set pvs lvm length] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17
Saturday 02 November 2024  19:26:20 -0400 (0:00:00.420)       0:01:06.997 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "__pvs_lvm_len": "1"
    },
    "changed": false
}

TASK [Set pool pvs] ************************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22
Saturday 02 November 2024  19:26:20 -0400 (0:00:00.045)       0:01:07.043 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_pool_pvs": [
            "/dev/sda"
        ]
    },
    "changed": false
}

TASK [Verify PV count] *********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27
Saturday 02 November 2024  19:26:20 -0400 (0:00:00.050)       0:01:07.093 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36
Saturday 02 November 2024  19:26:21 -0400 (0:00:00.074)       0:01:07.168 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_pv_type": "disk"
    },
    "changed": false
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41
Saturday 02 November 2024  19:26:21 -0400 (0:00:00.050)       0:01:07.218 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_pv_type": "disk"
    },
    "changed": false
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46
Saturday 02 November 2024  19:26:21 -0400 (0:00:00.093)       0:01:07.312 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level",
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:51
Saturday 02 November 2024  19:26:21 -0400 (0:00:00.035)       0:01:07.347 ***** 
ok: [managed-node2] => (item=/dev/sda) => {
    "ansible_loop_var": "pv",
    "changed": false,
    "pv": "/dev/sda"
}

MSG:

All assertions passed

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:64
Saturday 02 November 2024  19:26:21 -0400 (0:00:00.067)       0:01:07.415 ***** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0
}

STDOUT:

True



TASK [Verify that PVs fill the whole devices when they should] *****************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:73
Saturday 02 November 2024  19:26:21 -0400 (0:00:00.436)       0:01:07.852 ***** 
skipping: [managed-node2] => (item=/dev/sda)  => {
    "ansible_loop_var": "st_pool_pv",
    "changed": false,
    "false_condition": "storage_test_pool.grow_to_fill | bool",
    "skip_reason": "Conditional result was False",
    "st_pool_pv": "/dev/sda"
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:83
Saturday 02 November 2024  19:26:21 -0400 (0:00:00.050)       0:01:07.902 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node2

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Saturday 02 November 2024  19:26:21 -0400 (0:00:00.048)       0:01:07.951 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Saturday 02 November 2024  19:26:21 -0400 (0:00:00.021)       0:01:07.973 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Saturday 02 November 2024  19:26:21 -0400 (0:00:00.021)       0:01:07.994 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Saturday 02 November 2024  19:26:21 -0400 (0:00:00.021)       0:01:08.016 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Saturday 02 November 2024  19:26:21 -0400 (0:00:00.021)       0:01:08.037 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Saturday 02 November 2024  19:26:21 -0400 (0:00:00.028)       0:01:08.066 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Saturday 02 November 2024  19:26:21 -0400 (0:00:00.032)       0:01:08.099 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55
Saturday 02 November 2024  19:26:21 -0400 (0:00:00.047)       0:01:08.146 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64
Saturday 02 November 2024  19:26:22 -0400 (0:00:00.039)       0:01:08.185 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74
Saturday 02 November 2024  19:26:22 -0400 (0:00:00.036)       0:01:08.221 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83
Saturday 02 November 2024  19:26:22 -0400 (0:00:00.030)       0:01:08.252 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}

TASK [Check LVM RAID] **********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:86
Saturday 02 November 2024  19:26:22 -0400 (0:00:00.046)       0:01:08.298 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node2

TASK [Validate pool member LVM RAID settings] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Saturday 02 November 2024  19:26:22 -0400 (0:00:00.053)       0:01:08.351 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '5g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': 'UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'})

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:8
Saturday 02 November 2024  19:26:22 -0400 (0:00:00.051)       0:01:08.403 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:16
Saturday 02 November 2024  19:26:22 -0400 (0:00:00.031)       0:01:08.434 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:20
Saturday 02 November 2024  19:26:22 -0400 (0:00:00.028)       0:01:08.463 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV stripe size] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:27
Saturday 02 November 2024  19:26:22 -0400 (0:00:00.026)       0:01:08.490 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested stripe size] *****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:31
Saturday 02 November 2024  19:26:22 -0400 (0:00:00.029)       0:01:08.519 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected stripe size] ************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:37
Saturday 02 November 2024  19:26:22 -0400 (0:00:00.029)       0:01:08.549 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Check stripe size] *******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:42
Saturday 02 November 2024  19:26:22 -0400 (0:00:00.038)       0:01:08.588 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Check Thin Pools] ********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:89
Saturday 02 November 2024  19:26:22 -0400 (0:00:00.040)       0:01:08.629 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node2

TASK [Validate pool member thinpool settings] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2
Saturday 02 November 2024  19:26:22 -0400 (0:00:00.049)       0:01:08.679 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '5g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': 'UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'})

TASK [Get information about thinpool] ******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:8
Saturday 02 November 2024  19:26:22 -0400 (0:00:00.050)       0:01:08.729 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_thin_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Check that volume is in correct thinpool (when thinp name is provided)] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:16
Saturday 02 November 2024  19:26:22 -0400 (0:00:00.043)       0:01:08.772 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_thin_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Check that volume is in thinpool (when thinp name is not provided)] ******
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:22
Saturday 02 November 2024  19:26:22 -0400 (0:00:00.025)       0:01:08.798 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_thin_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:26
Saturday 02 November 2024  19:26:22 -0400 (0:00:00.021)       0:01:08.819 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_thin_status": null
    },
    "changed": false
}

TASK [Check member encryption] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:92
Saturday 02 November 2024  19:26:22 -0400 (0:00:00.022)       0:01:08.841 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node2

TASK [Set test variables] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Saturday 02 November 2024  19:26:22 -0400 (0:00:00.047)       0:01:08.889 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Saturday 02 November 2024  19:26:22 -0400 (0:00:00.044)       0:01:08.934 ***** 
skipping: [managed-node2] => (item=/dev/sda)  => {
    "_storage_test_pool_member_path": "/dev/sda",
    "ansible_loop_var": "_storage_test_pool_member_path",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Saturday 02 November 2024  19:26:22 -0400 (0:00:00.026)       0:01:08.960 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml for managed-node2 => (item=/dev/sda)

TASK [Set variables used by tests] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:2
Saturday 02 November 2024  19:26:22 -0400 (0:00:00.049)       0:01:09.010 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": []
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:6
Saturday 02 November 2024  19:26:22 -0400 (0:00:00.074)       0:01:09.084 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:14
Saturday 02 November 2024  19:26:22 -0400 (0:00:00.066)       0:01:09.151 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:23
Saturday 02 November 2024  19:26:23 -0400 (0:00:00.049)       0:01:09.200 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "false and _storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:32
Saturday 02 November 2024  19:26:23 -0400 (0:00:00.047)       0:01:09.248 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:41
Saturday 02 November 2024  19:26:23 -0400 (0:00:00.045)       0:01:09.293 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null
    },
    "changed": false
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Saturday 02 November 2024  19:26:23 -0400 (0:00:00.022)       0:01:09.316 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:95
Saturday 02 November 2024  19:26:23 -0400 (0:00:00.022)       0:01:09.339 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node2

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Saturday 02 November 2024  19:26:23 -0400 (0:00:00.084)       0:01:09.423 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '5g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': 'UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'})

TASK [Get information about VDO deduplication] *********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:8
Saturday 02 November 2024  19:26:23 -0400 (0:00:00.044)       0:01:09.467 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO deduplication is off] ***************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:15
Saturday 02 November 2024  19:26:23 -0400 (0:00:00.028)       0:01:09.495 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO deduplication is on] ****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:21
Saturday 02 November 2024  19:26:23 -0400 (0:00:00.039)       0:01:09.534 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about VDO compression] ***********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:27
Saturday 02 November 2024  19:26:23 -0400 (0:00:00.033)       0:01:09.568 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO compression is off] *****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:34
Saturday 02 November 2024  19:26:23 -0400 (0:00:00.031)       0:01:09.600 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO compression is on] ******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:40
Saturday 02 November 2024  19:26:23 -0400 (0:00:00.060)       0:01:09.661 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:46
Saturday 02 November 2024  19:26:23 -0400 (0:00:00.060)       0:01:09.722 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_vdo_status": null
    },
    "changed": false
}

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:98
Saturday 02 November 2024  19:26:23 -0400 (0:00:00.067)       0:01:09.790 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node2

TASK [Run 'stratis report'] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Saturday 02 November 2024  19:26:23 -0400 (0:00:00.141)       0:01:09.932 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:11
Saturday 02 November 2024  19:26:23 -0400 (0:00:00.058)       0:01:09.991 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the pool was created] ****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Saturday 02 November 2024  19:26:23 -0400 (0:00:00.033)       0:01:10.024 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:25
Saturday 02 November 2024  19:26:23 -0400 (0:00:00.033)       0:01:10.058 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:34
Saturday 02 November 2024  19:26:23 -0400 (0:00:00.041)       0:01:10.100 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:44
Saturday 02 November 2024  19:26:23 -0400 (0:00:00.043)       0:01:10.143 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:101
Saturday 02 November 2024  19:26:24 -0400 (0:00:00.041)       0:01:10.185 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Saturday 02 November 2024  19:26:24 -0400 (0:00:00.032)       0:01:10.217 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '5g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': 'UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'})

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Saturday 02 November 2024  19:26:24 -0400 (0:00:00.062)       0:01:10.280 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_volume_present": true,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Saturday 02 November 2024  19:26:24 -0400 (0:00:00.065)       0:01:10.345 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node2 => (item=mount)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node2 => (item=fstab)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node2 => (item=fs)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node2 => (item=device)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node2 => (item=encryption)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node2 => (item=md)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node2 => (item=size)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node2 => (item=cache)

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Saturday 02 November 2024  19:26:24 -0400 (0:00:00.171)       0:01:10.517 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/mapper/foo-test1"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Saturday 02 November 2024  19:26:24 -0400 (0:00:00.039)       0:01:10.556 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "/opt/test1",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Saturday 02 November 2024  19:26:24 -0400 (0:00:00.051)       0:01:10.607 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Saturday 02 November 2024  19:26:24 -0400 (0:00:00.022)       0:01:10.629 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Saturday 02 November 2024  19:26:24 -0400 (0:00:00.027)       0:01:10.657 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Saturday 02 November 2024  19:26:24 -0400 (0:00:00.023)       0:01:10.680 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Saturday 02 November 2024  19:26:24 -0400 (0:00:00.020)       0:01:10.700 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode",
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Saturday 02 November 2024  19:26:24 -0400 (0:00:00.021)       0:01:10.722 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Saturday 02 November 2024  19:26:24 -0400 (0:00:00.020)       0:01:10.742 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Saturday 02 November 2024  19:26:24 -0400 (0:00:00.020)       0:01:10.763 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Saturday 02 November 2024  19:26:24 -0400 (0:00:00.023)       0:01:10.786 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Saturday 02 November 2024  19:26:24 -0400 (0:00:00.032)       0:01:10.818 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "1",
        "storage_test_fstab_expected_mount_options_matches": "1",
        "storage_test_fstab_expected_mount_point_matches": "1",
        "storage_test_fstab_id_matches": [
            "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c "
        ],
        "storage_test_fstab_mount_options_matches": [
            " /opt/test1 xfs defaults "
        ],
        "storage_test_fstab_mount_point_matches": [
            " /opt/test1 "
        ]
    },
    "changed": false
}

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Saturday 02 November 2024  19:26:24 -0400 (0:00:00.104)       0:01:10.922 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Saturday 02 November 2024  19:26:24 -0400 (0:00:00.053)       0:01:10.975 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Saturday 02 November 2024  19:26:24 -0400 (0:00:00.049)       0:01:11.025 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__storage_verify_mount_options | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Saturday 02 November 2024  19:26:24 -0400 (0:00:00.040)       0:01:11.066 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:51
Saturday 02 November 2024  19:26:24 -0400 (0:00:00.026)       0:01:11.093 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Saturday 02 November 2024  19:26:24 -0400 (0:00:00.026)       0:01:11.119 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Saturday 02 November 2024  19:26:25 -0400 (0:00:00.050)       0:01:11.170 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Saturday 02 November 2024  19:26:25 -0400 (0:00:00.059)       0:01:11.229 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730589928.016337,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "ctime": 1730589928.016337,
        "dev": 6,
        "device_type": 64768,
        "executable": false,
        "exists": true,
        "gid": 6,
        "gr_name": "disk",
        "inode": 5141,
        "isblk": true,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": false,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/symlink",
        "mode": "0660",
        "mtime": 1730589928.016337,
        "nlink": 1,
        "path": "/dev/mapper/foo-test1",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": null,
        "wgrp": true,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Saturday 02 November 2024  19:26:25 -0400 (0:00:00.413)       0:01:11.642 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Saturday 02 November 2024  19:26:25 -0400 (0:00:00.028)       0:01:11.671 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Saturday 02 November 2024  19:26:25 -0400 (0:00:00.021)       0:01:11.693 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Saturday 02 November 2024  19:26:25 -0400 (0:00:00.036)       0:01:11.730 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "st_volume_type": "lvm"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Saturday 02 November 2024  19:26:25 -0400 (0:00:00.036)       0:01:11.766 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"raid\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Saturday 02 November 2024  19:26:25 -0400 (0:00:00.041)       0:01:11.808 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Saturday 02 November 2024  19:26:25 -0400 (0:00:00.052)       0:01:11.861 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Saturday 02 November 2024  19:26:25 -0400 (0:00:00.031)       0:01:11.892 ***** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: cryptsetup

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Saturday 02 November 2024  19:26:26 -0400 (0:00:00.848)       0:01:12.741 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Saturday 02 November 2024  19:26:26 -0400 (0:00:00.037)       0:01:12.778 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Saturday 02 November 2024  19:26:26 -0400 (0:00:00.037)       0:01:12.815 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Saturday 02 November 2024  19:26:26 -0400 (0:00:00.100)       0:01:12.915 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Saturday 02 November 2024  19:26:26 -0400 (0:00:00.033)       0:01:12.949 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Saturday 02 November 2024  19:26:26 -0400 (0:00:00.044)       0:01:12.993 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Saturday 02 November 2024  19:26:26 -0400 (0:00:00.062)       0:01:13.056 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Saturday 02 November 2024  19:26:26 -0400 (0:00:00.088)       0:01:13.145 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Saturday 02 November 2024  19:26:27 -0400 (0:00:00.033)       0:01:13.178 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Saturday 02 November 2024  19:26:27 -0400 (0:00:00.056)       0:01:13.235 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Saturday 02 November 2024  19:26:27 -0400 (0:00:00.056)       0:01:13.291 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Saturday 02 November 2024  19:26:27 -0400 (0:00:00.051)       0:01:13.342 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Saturday 02 November 2024  19:26:27 -0400 (0:00:00.062)       0:01:13.405 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Saturday 02 November 2024  19:26:27 -0400 (0:00:00.071)       0:01:13.476 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Saturday 02 November 2024  19:26:27 -0400 (0:00:00.035)       0:01:13.512 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Saturday 02 November 2024  19:26:27 -0400 (0:00:00.035)       0:01:13.548 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Saturday 02 November 2024  19:26:27 -0400 (0:00:00.044)       0:01:13.593 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Saturday 02 November 2024  19:26:27 -0400 (0:00:00.063)       0:01:13.656 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Saturday 02 November 2024  19:26:27 -0400 (0:00:00.061)       0:01:13.717 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Saturday 02 November 2024  19:26:27 -0400 (0:00:00.052)       0:01:13.769 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Saturday 02 November 2024  19:26:27 -0400 (0:00:00.041)       0:01:13.811 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Saturday 02 November 2024  19:26:27 -0400 (0:00:00.031)       0:01:13.843 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Saturday 02 November 2024  19:26:27 -0400 (0:00:00.045)       0:01:13.888 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Saturday 02 November 2024  19:26:27 -0400 (0:00:00.038)       0:01:13.927 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Saturday 02 November 2024  19:26:27 -0400 (0:00:00.033)       0:01:13.961 ***** 
ok: [managed-node2] => {
    "bytes": 5368709120,
    "changed": false,
    "lvm": "5g",
    "parted": "5GiB",
    "size": "5 GiB"
}

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Saturday 02 November 2024  19:26:28 -0400 (0:00:00.530)       0:01:14.491 ***** 
ok: [managed-node2] => {
    "bytes": 5368709120,
    "changed": false,
    "lvm": "5g",
    "parted": "5GiB",
    "size": "5 GiB"
}

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Saturday 02 November 2024  19:26:28 -0400 (0:00:00.471)       0:01:14.963 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_expected_size": "5368709120"
    },
    "changed": false
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Saturday 02 November 2024  19:26:28 -0400 (0:00:00.106)       0:01:15.070 ***** 
ok: [managed-node2] => {
    "storage_test_expected_size": "5368709120"
}

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Saturday 02 November 2024  19:26:28 -0400 (0:00:00.047)       0:01:15.117 ***** 
ok: [managed-node2] => {
    "bytes": 10726680821,
    "changed": false,
    "lvm": "9g",
    "parted": "9GiB",
    "size": "9 GiB"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Saturday 02 November 2024  19:26:29 -0400 (0:00:00.467)       0:01:15.584 ***** 
skipping: [managed-node2] => {
    "false_condition": "'%' in storage_test_volume.size | string"
}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Saturday 02 November 2024  19:26:29 -0400 (0:00:00.070)       0:01:15.655 ***** 
skipping: [managed-node2] => {
    "false_condition": "'%' in storage_test_volume.size | string"
}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Saturday 02 November 2024  19:26:29 -0400 (0:00:00.073)       0:01:15.729 ***** 
skipping: [managed-node2] => {
    "false_condition": "'%' in storage_test_volume.size | string"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Saturday 02 November 2024  19:26:29 -0400 (0:00:00.067)       0:01:15.796 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "'%' in storage_test_volume.size | string",
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Saturday 02 November 2024  19:26:29 -0400 (0:00:00.093)       0:01:15.890 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Saturday 02 November 2024  19:26:29 -0400 (0:00:00.056)       0:01:15.947 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Saturday 02 November 2024  19:26:29 -0400 (0:00:00.063)       0:01:16.010 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Saturday 02 November 2024  19:26:29 -0400 (0:00:00.051)       0:01:16.062 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Saturday 02 November 2024  19:26:29 -0400 (0:00:00.035)       0:01:16.097 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Saturday 02 November 2024  19:26:29 -0400 (0:00:00.043)       0:01:16.140 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Saturday 02 November 2024  19:26:30 -0400 (0:00:00.030)       0:01:16.171 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Saturday 02 November 2024  19:26:30 -0400 (0:00:00.041)       0:01:16.213 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Saturday 02 November 2024  19:26:30 -0400 (0:00:00.036)       0:01:16.249 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Saturday 02 November 2024  19:26:30 -0400 (0:00:00.053)       0:01:16.303 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Saturday 02 November 2024  19:26:30 -0400 (0:00:00.046)       0:01:16.350 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Saturday 02 November 2024  19:26:30 -0400 (0:00:00.030)       0:01:16.381 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Saturday 02 November 2024  19:26:30 -0400 (0:00:00.034)       0:01:16.416 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Saturday 02 November 2024  19:26:30 -0400 (0:00:00.033)       0:01:16.449 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Saturday 02 November 2024  19:26:30 -0400 (0:00:00.044)       0:01:16.494 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Saturday 02 November 2024  19:26:30 -0400 (0:00:00.051)       0:01:16.545 ***** 
ok: [managed-node2] => {
    "storage_test_actual_size": {
        "bytes": 5368709120,
        "changed": false,
        "failed": false,
        "lvm": "5g",
        "parted": "5GiB",
        "size": "5 GiB"
    }
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Saturday 02 November 2024  19:26:30 -0400 (0:00:00.078)       0:01:16.623 ***** 
ok: [managed-node2] => {
    "storage_test_expected_size": "5368709120"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Saturday 02 November 2024  19:26:30 -0400 (0:00:00.063)       0:01:16.687 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Saturday 02 November 2024  19:26:30 -0400 (0:00:00.125)       0:01:16.812 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "lvs",
        "--noheadings",
        "--nameprefixes",
        "--units=b",
        "--nosuffix",
        "--unquoted",
        "-o",
        "name,attr,cache_total_blocks,chunk_size,segtype",
        "foo/test1"
    ],
    "delta": "0:00:00.031926",
    "end": "2024-11-02 19:26:31.103790",
    "rc": 0,
    "start": "2024-11-02 19:26:31.071864"
}

STDOUT:

  LVM2_LV_NAME=test1 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Saturday 02 November 2024  19:26:31 -0400 (0:00:00.550)       0:01:17.363 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_lv_segtype": [
            "linear"
        ]
    },
    "changed": false
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Saturday 02 November 2024  19:26:31 -0400 (0:00:00.076)       0:01:17.440 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Saturday 02 November 2024  19:26:31 -0400 (0:00:00.086)       0:01:17.526 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Saturday 02 November 2024  19:26:31 -0400 (0:00:00.087)       0:01:17.613 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Saturday 02 November 2024  19:26:31 -0400 (0:00:00.085)       0:01:17.699 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Saturday 02 November 2024  19:26:31 -0400 (0:00:00.088)       0:01:17.788 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Saturday 02 November 2024  19:26:31 -0400 (0:00:00.062)       0:01:17.850 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Saturday 02 November 2024  19:26:31 -0400 (0:00:00.025)       0:01:17.876 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Saturday 02 November 2024  19:26:31 -0400 (0:00:00.021)       0:01:17.898 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Repeat the previous invocation to verify idempotence] ********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_fs.yml:84
Saturday 02 November 2024  19:26:31 -0400 (0:00:00.030)       0:01:17.928 ***** 
included: fedora.linux_system_roles.storage for managed-node2

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Saturday 02 November 2024  19:26:31 -0400 (0:00:00.067)       0:01:17.995 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Saturday 02 November 2024  19:26:31 -0400 (0:00:00.037)       0:01:18.033 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Saturday 02 November 2024  19:26:31 -0400 (0:00:00.063)       0:01:18.097 ***** 
skipping: [managed-node2] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node2] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}
ok: [managed-node2] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Saturday 02 November 2024  19:26:32 -0400 (0:00:00.082)       0:01:18.180 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Saturday 02 November 2024  19:26:32 -0400 (0:00:00.040)       0:01:18.220 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Saturday 02 November 2024  19:26:32 -0400 (0:00:00.041)       0:01:18.261 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Saturday 02 November 2024  19:26:32 -0400 (0:00:00.086)       0:01:18.347 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Saturday 02 November 2024  19:26:32 -0400 (0:00:00.037)       0:01:18.384 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Saturday 02 November 2024  19:26:32 -0400 (0:00:00.085)       0:01:18.470 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Saturday 02 November 2024  19:26:32 -0400 (0:00:00.034)       0:01:18.504 ***** 
ok: [managed-node2] => {
    "storage_pools": [
        {
            "disks": [
                "sda"
            ],
            "name": "foo",
            "volumes": [
                {
                    "fs_type": "xfs",
                    "mount_point": "/opt/test1",
                    "name": "test1",
                    "size": "5g"
                }
            ]
        }
    ]
}

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Saturday 02 November 2024  19:26:32 -0400 (0:00:00.090)       0:01:18.594 ***** 
ok: [managed-node2] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Saturday 02 November 2024  19:26:32 -0400 (0:00:00.037)       0:01:18.632 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Saturday 02 November 2024  19:26:32 -0400 (0:00:00.034)       0:01:18.667 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Saturday 02 November 2024  19:26:32 -0400 (0:00:00.029)       0:01:18.696 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Saturday 02 November 2024  19:26:32 -0400 (0:00:00.030)       0:01:18.727 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Saturday 02 November 2024  19:26:32 -0400 (0:00:00.028)       0:01:18.755 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Saturday 02 November 2024  19:26:32 -0400 (0:00:00.069)       0:01:18.825 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Saturday 02 November 2024  19:26:32 -0400 (0:00:00.019)       0:01:18.844 ***** 
ok: [managed-node2] => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [
        "/dev/mapper/foo-test1",
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1",
        "/dev/xvda2"
    ],
    "mounts": [
        {
            "dump": 0,
            "fstype": "xfs",
            "group": null,
            "mode": null,
            "opts": "defaults",
            "owner": null,
            "passno": 0,
            "path": "/opt/test1",
            "src": "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c",
            "state": "mounted"
        }
    ],
    "packages": [
        "lvm2",
        "xfsprogs"
    ],
    "pools": [
        {
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/foo-test1",
                    "_kernel_device": "/dev/dm-0",
                    "_mount_id": "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c",
                    "_raw_device": "/dev/mapper/foo-test1",
                    "_raw_kernel_device": "/dev/dm-0",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "5g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        }
    ],
    "volumes": []
}

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Saturday 02 November 2024  19:26:34 -0400 (0:00:01.699)       0:01:20.543 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Saturday 02 November 2024  19:26:34 -0400 (0:00:00.066)       0:01:20.610 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730589930.6483526,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "3a0ec056933b01b431c10ec4caa7278d6f7f1948",
        "ctime": 1730589930.6443527,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 541065418,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1730589930.6443527,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1436,
        "uid": 0,
        "version": "269688944",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Saturday 02 November 2024  19:26:34 -0400 (0:00:00.462)       0:01:21.073 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "blivet_output is changed",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Saturday 02 November 2024  19:26:34 -0400 (0:00:00.054)       0:01:21.129 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Saturday 02 November 2024  19:26:35 -0400 (0:00:00.052)       0:01:21.182 ***** 
ok: [managed-node2] => {
    "blivet_output": {
        "actions": [],
        "changed": false,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/mapper/foo-test1",
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1",
            "/dev/xvda2"
        ],
        "mounts": [
            {
                "dump": 0,
                "fstype": "xfs",
                "group": null,
                "mode": null,
                "opts": "defaults",
                "owner": null,
                "passno": 0,
                "path": "/opt/test1",
                "src": "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c",
                "state": "mounted"
            }
        ],
        "packages": [
            "lvm2",
            "xfsprogs"
        ],
        "pools": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/foo-test1",
                        "_kernel_device": "/dev/dm-0",
                        "_mount_id": "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c",
                        "_raw_device": "/dev/mapper/foo-test1",
                        "_raw_kernel_device": "/dev/dm-0",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "5g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Saturday 02 November 2024  19:26:35 -0400 (0:00:00.079)       0:01:21.261 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/foo-test1",
                        "_kernel_device": "/dev/dm-0",
                        "_mount_id": "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c",
                        "_raw_device": "/dev/mapper/foo-test1",
                        "_raw_kernel_device": "/dev/dm-0",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "5g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Saturday 02 November 2024  19:26:35 -0400 (0:00:00.054)       0:01:21.316 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Saturday 02 November 2024  19:26:35 -0400 (0:00:00.068)       0:01:21.384 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Saturday 02 November 2024  19:26:35 -0400 (0:00:00.093)       0:01:21.478 ***** 
ok: [managed-node2] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Saturday 02 November 2024  19:26:36 -0400 (0:00:00.866)       0:01:22.344 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
ok: [managed-node2] => (item={'src': 'UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": false,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "xfs",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c",
        "state": "mounted"
    },
    "name": "/opt/test1",
    "opts": "defaults",
    "passno": "0",
    "src": "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c"
}

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Saturday 02 November 2024  19:26:36 -0400 (0:00:00.569)       0:01:22.914 ***** 
skipping: [managed-node2] => (item={'src': 'UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None})  => {
    "ansible_loop_var": "mount_info",
    "changed": false,
    "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c",
        "state": "mounted"
    },
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Saturday 02 November 2024  19:26:36 -0400 (0:00:00.100)       0:01:23.014 ***** 
ok: [managed-node2] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Saturday 02 November 2024  19:26:37 -0400 (0:00:00.843)       0:01:23.858 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730589811.9926362,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1730589808.2226133,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 675283161,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1730589808.2243166,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "2749971072",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Saturday 02 November 2024  19:26:38 -0400 (0:00:00.517)       0:01:24.375 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Saturday 02 November 2024  19:26:38 -0400 (0:00:00.059)       0:01:24.434 ***** 
ok: [managed-node2]

TASK [Verify role results] *****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_fs.yml:97
Saturday 02 November 2024  19:26:39 -0400 (0:00:01.089)       0:01:25.523 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node2

TASK [Print out pool information] **********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Saturday 02 November 2024  19:26:39 -0400 (0:00:00.082)       0:01:25.606 ***** 
ok: [managed-node2] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/foo-test1",
                    "_kernel_device": "/dev/dm-0",
                    "_mount_id": "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c",
                    "_raw_device": "/dev/mapper/foo-test1",
                    "_raw_kernel_device": "/dev/dm-0",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "5g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        }
    ]
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Saturday 02 November 2024  19:26:39 -0400 (0:00:00.073)       0:01:25.679 ***** 
skipping: [managed-node2] => {
    "false_condition": "_storage_volumes_list | length > 0"
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Saturday 02 November 2024  19:26:39 -0400 (0:00:00.061)       0:01:25.740 ***** 
ok: [managed-node2] => {
    "changed": false,
    "info": {
        "/dev/mapper/foo-test1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/opt/test1",
            "name": "/dev/mapper/foo-test1",
            "size": "5G",
            "type": "lvm",
            "uuid": "c454eb62-a0eb-4a79-89bf-0d471375a06c"
        },
        "/dev/sda": {
            "fstype": "LVM2_member",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": "xOHC09-8dIW-nh4C-0hDX-JdU0-78e6-Sh8o2o"
        },
        "/dev/sdb": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda1",
            "size": "1M",
            "type": "partition",
            "uuid": ""
        },
        "/dev/xvda2": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda2",
            "size": "250G",
            "type": "partition",
            "uuid": "70689318-97f1-4727-890d-5d50652a95d6"
        }
    }
}

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Saturday 02 November 2024  19:26:40 -0400 (0:00:00.433)       0:01:26.174 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.003200",
    "end": "2024-11-02 19:26:40.350293",
    "rc": 0,
    "start": "2024-11-02 19:26:40.347093"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Wed Oct 30 09:04:14 2024
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=70689318-97f1-4727-890d-5d50652a95d6 /                       xfs     defaults        0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c /opt/test1 xfs defaults 0 0

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Saturday 02 November 2024  19:26:40 -0400 (0:00:00.410)       0:01:26.585 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.003192",
    "end": "2024-11-02 19:26:40.765354",
    "failed_when_result": false,
    "rc": 0,
    "start": "2024-11-02 19:26:40.762162"
}

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Saturday 02 November 2024  19:26:40 -0400 (0:00:00.418)       0:01:27.003 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node2 => (item={'disks': ['sda'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'lvm', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '5g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': 'UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'}]})

TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Saturday 02 November 2024  19:26:40 -0400 (0:00:00.104)       0:01:27.108 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Saturday 02 November 2024  19:26:40 -0400 (0:00:00.035)       0:01:27.143 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "vgs",
        "--noheadings",
        "--binary",
        "-o",
        "shared",
        "foo"
    ],
    "delta": "0:00:00.028315",
    "end": "2024-11-02 19:26:41.346845",
    "rc": 0,
    "start": "2024-11-02 19:26:41.318530"
}

STDOUT:

        0

TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Saturday 02 November 2024  19:26:41 -0400 (0:00:00.468)       0:01:27.611 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify pool subset] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34
Saturday 02 November 2024  19:26:41 -0400 (0:00:00.052)       0:01:27.664 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node2 => (item=members)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node2 => (item=volumes)

TASK [Set test variables] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2
Saturday 02 November 2024  19:26:41 -0400 (0:00:00.090)       0:01:27.755 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_pv_count": "1",
        "_storage_test_pool_pvs_lvm": [
            "/dev/sda"
        ]
    },
    "changed": false
}

TASK [Get the canonical device path for each member device] ********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8
Saturday 02 November 2024  19:26:41 -0400 (0:00:00.082)       0:01:27.838 ***** 
ok: [managed-node2] => (item=/dev/sda) => {
    "ansible_loop_var": "pv",
    "changed": false,
    "device": "/dev/sda",
    "pv": "/dev/sda"
}

TASK [Set pvs lvm length] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17
Saturday 02 November 2024  19:26:42 -0400 (0:00:00.438)       0:01:28.277 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "__pvs_lvm_len": "1"
    },
    "changed": false
}

TASK [Set pool pvs] ************************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22
Saturday 02 November 2024  19:26:42 -0400 (0:00:00.076)       0:01:28.353 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_pool_pvs": [
            "/dev/sda"
        ]
    },
    "changed": false
}

TASK [Verify PV count] *********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27
Saturday 02 November 2024  19:26:42 -0400 (0:00:00.073)       0:01:28.426 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36
Saturday 02 November 2024  19:26:42 -0400 (0:00:00.110)       0:01:28.537 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_pv_type": "disk"
    },
    "changed": false
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41
Saturday 02 November 2024  19:26:42 -0400 (0:00:00.075)       0:01:28.613 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_pv_type": "disk"
    },
    "changed": false
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46
Saturday 02 November 2024  19:26:42 -0400 (0:00:00.090)       0:01:28.703 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level",
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:51
Saturday 02 November 2024  19:26:42 -0400 (0:00:00.033)       0:01:28.736 ***** 
ok: [managed-node2] => (item=/dev/sda) => {
    "ansible_loop_var": "pv",
    "changed": false,
    "pv": "/dev/sda"
}

MSG:

All assertions passed

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:64
Saturday 02 November 2024  19:26:42 -0400 (0:00:00.081)       0:01:28.818 ***** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0
}

STDOUT:

True



STDERR:

OpenSSH_9.9p1, OpenSSL 3.2.2 4 Jun 2024
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.41.29 originally 10.31.41.29
debug2: match not found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: configuration requests final Match pass
debug2: resolve_canonicalize: hostname 10.31.41.29 is address
debug1: re-parsing configuration
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.41.29 originally 10.31.41.29
debug2: match found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: auto-mux: Trying existing master at '/root/.ansible/cp/eccdcee41b'
debug2: fd 3 setting O_NONBLOCK
debug2: mux_client_hello_exchange: master version 4
debug1: mux_client_request_session: master session id: 2
debug2: Received exit status from master 0
Shared connection to 10.31.41.29 closed.


TASK [Verify that PVs fill the whole devices when they should] *****************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:73
Saturday 02 November 2024  19:26:43 -0400 (0:00:00.497)       0:01:29.315 ***** 
skipping: [managed-node2] => (item=/dev/sda)  => {
    "ansible_loop_var": "st_pool_pv",
    "changed": false,
    "false_condition": "storage_test_pool.grow_to_fill | bool",
    "skip_reason": "Conditional result was False",
    "st_pool_pv": "/dev/sda"
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:83
Saturday 02 November 2024  19:26:43 -0400 (0:00:00.088)       0:01:29.404 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node2

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Saturday 02 November 2024  19:26:43 -0400 (0:00:00.080)       0:01:29.485 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Saturday 02 November 2024  19:26:43 -0400 (0:00:00.044)       0:01:29.529 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Saturday 02 November 2024  19:26:43 -0400 (0:00:00.081)       0:01:29.611 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Saturday 02 November 2024  19:26:43 -0400 (0:00:00.032)       0:01:29.643 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Saturday 02 November 2024  19:26:43 -0400 (0:00:00.032)       0:01:29.675 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Saturday 02 November 2024  19:26:43 -0400 (0:00:00.031)       0:01:29.707 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Saturday 02 November 2024  19:26:43 -0400 (0:00:00.030)       0:01:29.737 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55
Saturday 02 November 2024  19:26:43 -0400 (0:00:00.029)       0:01:29.767 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64
Saturday 02 November 2024  19:26:43 -0400 (0:00:00.033)       0:01:29.801 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74
Saturday 02 November 2024  19:26:43 -0400 (0:00:00.030)       0:01:29.832 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83
Saturday 02 November 2024  19:26:43 -0400 (0:00:00.029)       0:01:29.861 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}
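
Every verify-pool-md.yml task above is gated on the same condition, so for this non-RAID pool the file reduces to a run of skips. A minimal sketch of the gating pattern, assuming a set_fact shape (the when: clause is the part grounded in the log; the fact name comes from the reset task above, and the pattern text is invented for illustration):

    - name: Set md version regex
      set_fact:
        storage_test_md_metadata_version_re: "Version : {{ storage_test_pool.raid_metadata_version }}"  # pattern assumed
      when: storage_test_pool.raid_level != none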

TASK [Check LVM RAID] **********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:86
Saturday 02 November 2024  19:26:43 -0400 (0:00:00.031)       0:01:29.893 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node2

TASK [Validate pool member LVM RAID settings] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Saturday 02 November 2024  19:26:43 -0400 (0:00:00.067)       0:01:29.960 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '5g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': 'UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'})
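
For orientation, the rendered loop item above can be folded back into the input that presumably produced this run. A minimal sketch of the corresponding storage_pools variable, reconstructed from the item (pool name inferred from _device; role-supplied defaults such as fs_overwrite_existing omitted):

    storage_pools:
      - name: foo                  # inferred from _device: /dev/mapper/foo-test1
        disks: ['sda']
        volumes:
          - name: test1
            size: 5g
            fs_type: xfs
            mount_point: /opt/test1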

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:8
Saturday 02 November 2024  19:26:43 -0400 (0:00:00.061)       0:01:30.022 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:16
Saturday 02 November 2024  19:26:43 -0400 (0:00:00.062)       0:01:30.085 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:20
Saturday 02 November 2024  19:26:43 -0400 (0:00:00.062)       0:01:30.147 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV stripe size] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:27
Saturday 02 November 2024  19:26:44 -0400 (0:00:00.055)       0:01:30.203 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested stripe size] *****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:31
Saturday 02 November 2024  19:26:44 -0400 (0:00:00.041)       0:01:30.245 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected stripe size] ************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:37
Saturday 02 November 2024  19:26:44 -0400 (0:00:00.040)       0:01:30.285 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Check stripe size] *******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:42
Saturday 02 November 2024  19:26:44 -0400 (0:00:00.057)       0:01:30.342 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Check Thin Pools] ********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:89
Saturday 02 November 2024  19:26:44 -0400 (0:00:00.074)       0:01:30.417 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node2

TASK [Validate pool member thinpool settings] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2
Saturday 02 November 2024  19:26:44 -0400 (0:00:00.092)       0:01:30.509 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '5g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': 'UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'})

TASK [Get information about thinpool] ******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:8
Saturday 02 November 2024  19:26:44 -0400 (0:00:00.067)       0:01:30.576 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_thin_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Check that volume is in correct thinpool (when thinp name is provided)] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:16
Saturday 02 November 2024  19:26:44 -0400 (0:00:00.032)       0:01:30.609 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_thin_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Check that volume is in thinpool (when thinp name is not provided)] ******
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:22
Saturday 02 November 2024  19:26:44 -0400 (0:00:00.042)       0:01:30.652 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_thin_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:26
Saturday 02 November 2024  19:26:44 -0400 (0:00:00.042)       0:01:30.695 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_thin_status": null
    },
    "changed": false
}

TASK [Check member encryption] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:92
Saturday 02 November 2024  19:26:44 -0400 (0:00:00.037)       0:01:30.732 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node2

TASK [Set test variables] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Saturday 02 November 2024  19:26:44 -0400 (0:00:00.074)       0:01:30.807 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Saturday 02 November 2024  19:26:44 -0400 (0:00:00.064)       0:01:30.871 ***** 
skipping: [managed-node2] => (item=/dev/sda)  => {
    "_storage_test_pool_member_path": "/dev/sda",
    "ansible_loop_var": "_storage_test_pool_member_path",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped
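
The member validation loops over the pool's physical volumes and skips each one because this LVM pool is unencrypted. A sketch of the loop shape implied by the output (loop variable, loop list, and condition are taken from the log; the included file name is hypothetical):

    - name: Validate pool member LUKS settings
      include_tasks: verify-pool-member-encryption.yml   # hypothetical name
      loop: "{{ _storage_test_pool_pvs }}"
      loop_control:
        loop_var: _storage_test_pool_member_path
      when: storage_test_pool.type == 'lvm' and storage_test_pool.encryption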

TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Saturday 02 November 2024  19:26:44 -0400 (0:00:00.036)       0:01:30.908 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml for managed-node2 => (item=/dev/sda)

TASK [Set variables used by tests] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:2
Saturday 02 November 2024  19:26:44 -0400 (0:00:00.059)       0:01:30.967 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": []
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:6
Saturday 02 November 2024  19:26:44 -0400 (0:00:00.069)       0:01:31.036 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed
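
The passing check presumably compares the number of matching /etc/crypttab lines against the expected count set earlier ("0" for this unencrypted pool). An illustrative assert under that assumption (variable names are from the log; the expression shape is guessed):

    - name: Check for /etc/crypttab entry
      assert:
        that: _storage_test_crypttab_entries | length == _storage_test_expected_crypttab_entries | int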

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:14
Saturday 02 November 2024  19:26:44 -0400 (0:00:00.067)       0:01:31.104 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:23
Saturday 02 November 2024  19:26:45 -0400 (0:00:00.100)       0:01:31.204 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "false and _storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:32
Saturday 02 November 2024  19:26:45 -0400 (0:00:00.078)       0:01:31.283 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:41
Saturday 02 November 2024  19:26:45 -0400 (0:00:00.076)       0:01:31.360 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null
    },
    "changed": false
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Saturday 02 November 2024  19:26:45 -0400 (0:00:00.042)       0:01:31.402 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:95
Saturday 02 November 2024  19:26:45 -0400 (0:00:00.039)       0:01:31.442 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node2

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Saturday 02 November 2024  19:26:45 -0400 (0:00:00.091)       0:01:31.533 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '5g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': 'UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'})

TASK [Get information about VDO deduplication] *********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:8
Saturday 02 November 2024  19:26:45 -0400 (0:00:00.056)       0:01:31.589 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO deduplication is off] ***************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:15
Saturday 02 November 2024  19:26:45 -0400 (0:00:00.025)       0:01:31.614 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO deduplication is on] ****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:21
Saturday 02 November 2024  19:26:45 -0400 (0:00:00.023)       0:01:31.638 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about VDO compression] ***********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:27
Saturday 02 November 2024  19:26:45 -0400 (0:00:00.025)       0:01:31.663 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO compression is off] *****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:34
Saturday 02 November 2024  19:26:45 -0400 (0:00:00.024)       0:01:31.688 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO compression is on] ******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:40
Saturday 02 November 2024  19:26:45 -0400 (0:00:00.021)       0:01:31.709 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:46
Saturday 02 November 2024  19:26:45 -0400 (0:00:00.020)       0:01:31.730 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_vdo_status": null
    },
    "changed": false
}
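
All six VDO tasks above share one guard, so a plain LVM volume with deduplication and compression unset produces nothing but skips. A rough sketch of one guarded check, assuming storage_test_vdo_status holds textual LVM output (the when: clause is grounded in the log; the search pattern is invented):

    - name: Check if VDO deduplication is off
      assert:
        that: storage_test_vdo_status is not search('deduplication')   # pattern assumed
      when: storage_test_vdo_volume.deduplication != none or
            storage_test_vdo_volume.compression != none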

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:98
Saturday 02 November 2024  19:26:45 -0400 (0:00:00.025)       0:01:31.756 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node2

TASK [Run 'stratis report'] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Saturday 02 November 2024  19:26:45 -0400 (0:00:00.085)       0:01:31.841 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:11
Saturday 02 November 2024  19:26:45 -0400 (0:00:00.024)       0:01:31.865 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the pools were created] **************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Saturday 02 November 2024  19:26:45 -0400 (0:00:00.021)       0:01:31.886 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:25
Saturday 02 November 2024  19:26:45 -0400 (0:00:00.025)       0:01:31.912 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:34
Saturday 02 November 2024  19:26:45 -0400 (0:00:00.030)       0:01:31.943 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:44
Saturday 02 November 2024  19:26:45 -0400 (0:00:00.028)       0:01:31.971 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:101
Saturday 02 November 2024  19:26:45 -0400 (0:00:00.096)       0:01:32.068 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Saturday 02 November 2024  19:26:45 -0400 (0:00:00.035)       0:01:32.103 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '5g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': 'UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'})

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Saturday 02 November 2024  19:26:45 -0400 (0:00:00.058)       0:01:32.161 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_volume_present": true,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Saturday 02 November 2024  19:26:46 -0400 (0:00:00.094)       0:01:32.255 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node2 => (item=mount)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node2 => (item=fstab)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node2 => (item=fs)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node2 => (item=device)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node2 => (item=encryption)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node2 => (item=md)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node2 => (item=size)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node2 => (item=cache)
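
The eight includes above come from a single loop over the _storage_volume_tests list set two tasks earlier; the unrendered {{ storage_test_volume_subset }} in the task banner above gives away the loop variable. The driving task presumably looks roughly like:

    - name: Run test verify for {{ storage_test_volume_subset }}
      include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
      loop: "{{ _storage_volume_tests }}"
      loop_control:
        loop_var: storage_test_volume_subset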

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Saturday 02 November 2024  19:26:46 -0400 (0:00:00.180)       0:01:32.436 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/mapper/foo-test1"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Saturday 02 November 2024  19:26:46 -0400 (0:00:00.051)       0:01:32.487 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "/opt/test1",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Saturday 02 November 2024  19:26:46 -0400 (0:00:00.071)       0:01:32.559 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Saturday 02 November 2024  19:26:46 -0400 (0:00:00.035)       0:01:32.595 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Saturday 02 November 2024  19:26:46 -0400 (0:00:00.040)       0:01:32.636 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Saturday 02 November 2024  19:26:46 -0400 (0:00:00.030)       0:01:32.666 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Saturday 02 November 2024  19:26:46 -0400 (0:00:00.030)       0:01:32.697 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode",
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Saturday 02 November 2024  19:26:46 -0400 (0:00:00.042)       0:01:32.739 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Saturday 02 November 2024  19:26:46 -0400 (0:00:00.045)       0:01:32.785 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Saturday 02 November 2024  19:26:46 -0400 (0:00:00.057)       0:01:32.842 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Saturday 02 November 2024  19:26:46 -0400 (0:00:00.060)       0:01:32.903 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Saturday 02 November 2024  19:26:46 -0400 (0:00:00.060)       0:01:32.964 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "1",
        "storage_test_fstab_expected_mount_options_matches": "1",
        "storage_test_fstab_expected_mount_point_matches": "1",
        "storage_test_fstab_id_matches": [
            "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c "
        ],
        "storage_test_fstab_mount_options_matches": [
            " /opt/test1 xfs defaults "
        ],
        "storage_test_fstab_mount_point_matches": [
            " /opt/test1 "
        ]
    },
    "changed": false
}
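
The expected_*_matches values are string counts; the asserts that follow presumably compare them against the lengths of the match lists harvested from /etc/fstab, e.g.:

    - name: Verify that the device identifier appears in /etc/fstab
      assert:
        that: storage_test_fstab_id_matches | length == storage_test_fstab_expected_id_matches | int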

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Saturday 02 November 2024  19:26:46 -0400 (0:00:00.146)       0:01:33.110 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Saturday 02 November 2024  19:26:47 -0400 (0:00:00.096)       0:01:33.207 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Saturday 02 November 2024  19:26:47 -0400 (0:00:00.107)       0:01:33.315 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__storage_verify_mount_options | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Saturday 02 November 2024  19:26:47 -0400 (0:00:00.068)       0:01:33.383 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:51
Saturday 02 November 2024  19:26:47 -0400 (0:00:00.042)       0:01:33.426 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Saturday 02 November 2024  19:26:47 -0400 (0:00:00.036)       0:01:33.463 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Saturday 02 November 2024  19:26:47 -0400 (0:00:00.080)       0:01:33.543 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Saturday 02 November 2024  19:26:47 -0400 (0:00:00.109)       0:01:33.653 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730589928.016337,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "ctime": 1730589928.016337,
        "dev": 6,
        "device_type": 64768,
        "executable": false,
        "exists": true,
        "gid": 6,
        "gr_name": "disk",
        "inode": 5141,
        "isblk": true,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": false,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/symlink",
        "mode": "0660",
        "mtime": 1730589928.016337,
        "nlink": 1,
        "path": "/dev/mapper/foo-test1",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": null,
        "wgrp": true,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
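
The stat result reports isblk: true for a path whose mimetype is inode/symlink: /dev/mapper/foo-test1 is a symlink to /dev/dm-0, so the task evidently stats the dereferenced target. A sketch consistent with that output (path from the loop item; follow and the register name are assumptions):

    - name: See whether the device node is present
      stat:
        path: "{{ storage_test_volume._device }}"
        follow: true               # assumed: isblk describes the /dev/dm-0 target
      register: storage_test_dev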

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Saturday 02 November 2024  19:26:47 -0400 (0:00:00.455)       0:01:34.108 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Saturday 02 November 2024  19:26:47 -0400 (0:00:00.057)       0:01:34.165 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Saturday 02 November 2024  19:26:48 -0400 (0:00:00.033)       0:01:34.199 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Saturday 02 November 2024  19:26:48 -0400 (0:00:00.051)       0:01:34.251 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "st_volume_type": "lvm"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Saturday 02 November 2024  19:26:48 -0400 (0:00:00.043)       0:01:34.294 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"raid\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Saturday 02 November 2024  19:26:48 -0400 (0:00:00.037)       0:01:34.332 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Saturday 02 November 2024  19:26:48 -0400 (0:00:00.069)       0:01:34.402 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Saturday 02 November 2024  19:26:48 -0400 (0:00:00.044)       0:01:34.447 ***** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: cryptsetup
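
"Nothing to do" is the package manager reporting that cryptsetup is already installed; the task behind it is presumably a plain package install, roughly:

    - name: Ensure cryptsetup is present
      package:
        name: cryptsetup
        state: present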

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Saturday 02 November 2024  19:26:49 -0400 (0:00:00.841)       0:01:35.288 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Saturday 02 November 2024  19:26:49 -0400 (0:00:00.050)       0:01:35.338 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Saturday 02 November 2024  19:26:49 -0400 (0:00:00.058)       0:01:35.397 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Saturday 02 November 2024  19:26:49 -0400 (0:00:00.127)       0:01:35.525 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Saturday 02 November 2024  19:26:49 -0400 (0:00:00.039)       0:01:35.565 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Saturday 02 November 2024  19:26:49 -0400 (0:00:00.039)       0:01:35.605 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Saturday 02 November 2024  19:26:49 -0400 (0:00:00.036)       0:01:35.641 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Saturday 02 November 2024  19:26:49 -0400 (0:00:00.034)       0:01:35.676 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Saturday 02 November 2024  19:26:49 -0400 (0:00:00.032)       0:01:35.709 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Saturday 02 November 2024  19:26:49 -0400 (0:00:00.083)       0:01:35.792 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Saturday 02 November 2024  19:26:49 -0400 (0:00:00.094)       0:01:35.887 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Saturday 02 November 2024  19:26:49 -0400 (0:00:00.066)       0:01:35.954 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Saturday 02 November 2024  19:26:49 -0400 (0:00:00.078)       0:01:36.033 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Saturday 02 November 2024  19:26:49 -0400 (0:00:00.066)       0:01:36.099 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Saturday 02 November 2024  19:26:49 -0400 (0:00:00.036)       0:01:36.135 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Saturday 02 November 2024  19:26:50 -0400 (0:00:00.036)       0:01:36.171 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Saturday 02 November 2024  19:26:50 -0400 (0:00:00.033)       0:01:36.205 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Saturday 02 November 2024  19:26:50 -0400 (0:00:00.032)       0:01:36.237 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Saturday 02 November 2024  19:26:50 -0400 (0:00:00.032)       0:01:36.270 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Saturday 02 November 2024  19:26:50 -0400 (0:00:00.034)       0:01:36.304 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Saturday 02 November 2024  19:26:50 -0400 (0:00:00.032)       0:01:36.337 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Saturday 02 November 2024  19:26:50 -0400 (0:00:00.035)       0:01:36.373 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Saturday 02 November 2024  19:26:50 -0400 (0:00:00.032)       0:01:36.405 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Saturday 02 November 2024  19:26:50 -0400 (0:00:00.035)       0:01:36.441 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Saturday 02 November 2024  19:26:50 -0400 (0:00:00.035)       0:01:36.476 ***** 
ok: [managed-node2] => {
    "bytes": 5368709120,
    "changed": false,
    "lvm": "5g",
    "parted": "5GiB",
    "size": "5 GiB"
}
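
The four renderings agree: 5 GiB = 5 * 2^30 bytes = 5368709120 bytes, written as 5g in LVM units and 5GiB in parted units.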

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Saturday 02 November 2024  19:26:50 -0400 (0:00:00.473)       0:01:36.949 ***** 
ok: [managed-node2] => {
    "bytes": 5368709120,
    "changed": false,
    "lvm": "5g",
    "parted": "5GiB",
    "size": "5 GiB"
}

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Saturday 02 November 2024  19:26:51 -0400 (0:00:00.458)       0:01:37.407 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_expected_size": "5368709120"
    },
    "changed": false
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Saturday 02 November 2024  19:26:51 -0400 (0:00:00.195)       0:01:37.603 ***** 
ok: [managed-node2] => {
    "storage_test_expected_size": "5368709120"
}

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Saturday 02 November 2024  19:26:51 -0400 (0:00:00.044)       0:01:37.647 ***** 
ok: [managed-node2] => {
    "bytes": 10726680821,
    "changed": false,
    "lvm": "9g",
    "parted": "9GiB",
    "size": "9 GiB"
}
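
For the parent device, 10726680821 bytes / 2^30 is roughly 9.99 GiB, which the LVM rendering truncates to 9g. The percentage-based branches that would use this pool figure are skipped below because the requested volume size (5g) is absolute, not a percentage.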

TASK [Show test pool] **********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Saturday 02 November 2024  19:26:51 -0400 (0:00:00.445)       0:01:38.093 ***** 
skipping: [managed-node2] => {
    "false_condition": "'%' in storage_test_volume.size | string"
}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Saturday 02 November 2024  19:26:51 -0400 (0:00:00.061)       0:01:38.155 ***** 
skipping: [managed-node2] => {
    "false_condition": "'%' in storage_test_volume.size | string"
}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Saturday 02 November 2024  19:26:52 -0400 (0:00:00.066)       0:01:38.221 ***** 
skipping: [managed-node2] => {
    "false_condition": "'%' in storage_test_volume.size | string"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Saturday 02 November 2024  19:26:52 -0400 (0:00:00.074)       0:01:38.296 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "'%' in storage_test_volume.size | string",
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Saturday 02 November 2024  19:26:52 -0400 (0:00:00.071)       0:01:38.368 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Saturday 02 November 2024  19:26:52 -0400 (0:00:00.036)       0:01:38.404 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Saturday 02 November 2024  19:26:52 -0400 (0:00:00.033)       0:01:38.437 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Saturday 02 November 2024  19:26:52 -0400 (0:00:00.031)       0:01:38.468 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Saturday 02 November 2024  19:26:52 -0400 (0:00:00.035)       0:01:38.504 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Saturday 02 November 2024  19:26:52 -0400 (0:00:00.034)       0:01:38.538 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Saturday 02 November 2024  19:26:52 -0400 (0:00:00.037)       0:01:38.576 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Saturday 02 November 2024  19:26:52 -0400 (0:00:00.030)       0:01:38.607 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Saturday 02 November 2024  19:26:52 -0400 (0:00:00.032)       0:01:38.640 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Saturday 02 November 2024  19:26:52 -0400 (0:00:00.037)       0:01:38.678 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Saturday 02 November 2024  19:26:52 -0400 (0:00:00.045)       0:01:38.723 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Saturday 02 November 2024  19:26:52 -0400 (0:00:00.035)       0:01:38.758 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Saturday 02 November 2024  19:26:52 -0400 (0:00:00.034)       0:01:38.793 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Saturday 02 November 2024  19:26:52 -0400 (0:00:00.030)       0:01:38.824 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Saturday 02 November 2024  19:26:52 -0400 (0:00:00.034)       0:01:38.859 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Saturday 02 November 2024  19:26:52 -0400 (0:00:00.029)       0:01:38.888 ***** 
ok: [managed-node2] => {
    "storage_test_actual_size": {
        "bytes": 5368709120,
        "changed": false,
        "failed": false,
        "lvm": "5g",
        "parted": "5GiB",
        "size": "5 GiB"
    }
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Saturday 02 November 2024  19:26:52 -0400 (0:00:00.036)       0:01:38.925 ***** 
ok: [managed-node2] => {
    "storage_test_expected_size": "5368709120"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Saturday 02 November 2024  19:26:52 -0400 (0:00:00.034)       0:01:38.959 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed
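
The check itself is a plain assert over the two values shown in the preceding tasks. A minimal sketch using the variable names visible in the log (the real test may tolerate small rounding differences):

    - name: Assert expected size is actual size (sketch)
      ansible.builtin.assert:
        that:
          - storage_test_actual_size.bytes == (storage_test_expected_size | int)
        fail_msg: >-
          actual size {{ storage_test_actual_size.bytes }} differs from
          expected size {{ storage_test_expected_size }}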

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Saturday 02 November 2024  19:26:52 -0400 (0:00:00.069)       0:01:39.029 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "lvs",
        "--noheadings",
        "--nameprefixes",
        "--units=b",
        "--nosuffix",
        "--unquoted",
        "-o",
        "name,attr,cache_total_blocks,chunk_size,segtype",
        "foo/test1"
    ],
    "delta": "0:00:00.028077",
    "end": "2024-11-02 19:26:53.214182",
    "rc": 0,
    "start": "2024-11-02 19:26:53.186105"
}

STDOUT:

  LVM2_LV_NAME=test1 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear
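
The --nameprefixes/--unquoted flags make lvs emit LVM2_* key=value pairs that are easy to pick apart with a regex. A sketch of how the next two tasks could derive and check the segment-type fact (the register name lv_info is an assumption; regex_search with a capture group returns a list, matching the list-valued fact set below):

    - name: Set LV segment type (sketch)
      ansible.builtin.set_fact:
        storage_test_lv_segtype: "{{ lv_info.stdout | regex_search('LVM2_SEGTYPE=(\\S+)', '\\1') }}"

    - name: Check segment type (sketch; an uncached linear LV is expected)
      ansible.builtin.assert:
        that:
          - storage_test_lv_segtype[0] == 'linear'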

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Saturday 02 November 2024  19:26:53 -0400 (0:00:00.425)       0:01:39.454 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_lv_segtype": [
            "linear"
        ]
    },
    "changed": false
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Saturday 02 November 2024  19:26:53 -0400 (0:00:00.070)       0:01:39.525 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Saturday 02 November 2024  19:26:53 -0400 (0:00:00.071)       0:01:39.596 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Saturday 02 November 2024  19:26:53 -0400 (0:00:00.047)       0:01:39.643 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Saturday 02 November 2024  19:26:53 -0400 (0:00:00.051)       0:01:39.694 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Saturday 02 November 2024  19:26:53 -0400 (0:00:00.063)       0:01:39.758 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Saturday 02 November 2024  19:26:53 -0400 (0:00:00.046)       0:01:39.805 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Saturday 02 November 2024  19:26:53 -0400 (0:00:00.022)       0:01:39.827 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Saturday 02 November 2024  19:26:53 -0400 (0:00:00.018)       0:01:39.845 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Remove the FS] ***********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_fs.yml:100
Saturday 02 November 2024  19:26:53 -0400 (0:00:00.022)       0:01:39.868 ***** 
included: fedora.linux_system_roles.storage for managed-node2

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Saturday 02 November 2024  19:26:53 -0400 (0:00:00.070)       0:01:39.938 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Saturday 02 November 2024  19:26:53 -0400 (0:00:00.032)       0:01:39.970 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Saturday 02 November 2024  19:26:53 -0400 (0:00:00.046)       0:01:40.016 ***** 
skipping: [managed-node2] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node2] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}
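
The pattern behind this task: walk vars files from most generic to most specific and include whichever exist, so CentOS_10.yml overrides anything RedHat.yml or CentOS.yml would have set. A condensed sketch, with the file list and variable name inferred from the skip conditions above:

    - name: Set platform/version specific variables (condensed sketch)
      ansible.builtin.include_vars: "{{ __vars_file }}"
      vars:
        __vars_file: "{{ role_path }}/vars/{{ item }}"
      loop:
        - RedHat.yml
        - CentOS.yml
        - CentOS_10.yml
      when: __vars_file is file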

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Saturday 02 November 2024  19:26:53 -0400 (0:00:00.054)       0:01:40.071 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Saturday 02 November 2024  19:26:53 -0400 (0:00:00.025)       0:01:40.097 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Saturday 02 November 2024  19:26:53 -0400 (0:00:00.025)       0:01:40.123 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Saturday 02 November 2024  19:26:53 -0400 (0:00:00.022)       0:01:40.145 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Saturday 02 November 2024  19:26:54 -0400 (0:00:00.022)       0:01:40.167 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Saturday 02 November 2024  19:26:54 -0400 (0:00:00.054)       0:01:40.222 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Saturday 02 November 2024  19:26:54 -0400 (0:00:00.031)       0:01:40.253 ***** 
ok: [managed-node2] => {
    "storage_pools": [
        {
            "disks": [
                "sda"
            ],
            "name": "foo",
            "volumes": [
                {
                    "fs_type": "unformatted",
                    "name": "test1",
                    "size": "5g"
                }
            ]
        }
    ]
}
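
This storage_pools value implies an invocation along the following lines (reconstructed from the logged variables, not quoted from the test); fs_type: unformatted is what drives the destroy-format action later in this run:

    - name: Remove the FS (reconstructed invocation)
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            disks:
              - sda
            volumes:
              - name: test1
                size: 5g
                fs_type: unformatted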

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Saturday 02 November 2024  19:26:54 -0400 (0:00:00.036)       0:01:40.290 ***** 
ok: [managed-node2] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Saturday 02 November 2024  19:26:54 -0400 (0:00:00.042)       0:01:40.332 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Saturday 02 November 2024  19:26:54 -0400 (0:00:00.042)       0:01:40.374 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Saturday 02 November 2024  19:26:54 -0400 (0:00:00.042)       0:01:40.417 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Saturday 02 November 2024  19:26:54 -0400 (0:00:00.041)       0:01:40.458 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Saturday 02 November 2024  19:26:54 -0400 (0:00:00.039)       0:01:40.498 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Saturday 02 November 2024  19:26:54 -0400 (0:00:00.092)       0:01:40.590 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Saturday 02 November 2024  19:26:54 -0400 (0:00:00.032)       0:01:40.622 ***** 
changed: [managed-node2] => {
    "actions": [
        {
            "action": "destroy format",
            "device": "/dev/mapper/foo-test1",
            "fs_type": "xfs"
        }
    ],
    "changed": true,
    "crypts": [],
    "leaves": [
        "/dev/mapper/foo-test1",
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1",
        "/dev/xvda2"
    ],
    "mounts": [
        {
            "fstype": "xfs",
            "path": "/opt/test1",
            "src": "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c",
            "state": "absent"
        },
        {
            "path": "/opt/test1",
            "state": "absent"
        }
    ],
    "packages": [
        "xfsprogs",
        "lvm2"
    ],
    "pools": [
        {
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/foo-test1",
                    "_kernel_device": "/dev/dm-0",
                    "_mount_id": "/dev/mapper/foo-test1",
                    "_raw_device": "/dev/mapper/foo-test1",
                    "_raw_kernel_device": "/dev/dm-0",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "unformatted",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": null,
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "5g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        }
    ],
    "volumes": []
}
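
The single destroy format action wipes the xfs signature from /dev/mapper/foo-test1 while keeping the LV itself, and the mounts list schedules removal of the old /opt/test1 fstab entry. One illustrative way to confirm the signature is gone (blkid -p exits 2 when it finds nothing to report; the register name is an assumption):

    - name: Confirm no filesystem signature remains on the LV (illustrative)
      ansible.builtin.command:
        argv: [blkid, -p, /dev/mapper/foo-test1]
      register: blkid_probe  # assumed register name
      changed_when: false
      failed_when: blkid_probe.rc not in [0, 2]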

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Saturday 02 November 2024  19:26:56 -0400 (0:00:01.909)       0:01:42.532 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Saturday 02 November 2024  19:26:56 -0400 (0:00:00.040)       0:01:42.573 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730589930.6483526,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "3a0ec056933b01b431c10ec4caa7278d6f7f1948",
        "ctime": 1730589930.6443527,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 541065418,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1730589930.6443527,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1436,
        "uid": 0,
        "version": "269688944",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Saturday 02 November 2024  19:26:56 -0400 (0:00:00.391)       0:01:42.965 ***** 
ok: [managed-node2] => {
    "backup": "",
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Saturday 02 November 2024  19:26:57 -0400 (0:00:00.439)       0:01:43.404 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Saturday 02 November 2024  19:26:57 -0400 (0:00:00.032)       0:01:43.436 ***** 
ok: [managed-node2] => {
    "blivet_output": {
        "actions": [
            {
                "action": "destroy format",
                "device": "/dev/mapper/foo-test1",
                "fs_type": "xfs"
            }
        ],
        "changed": true,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/mapper/foo-test1",
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1",
            "/dev/xvda2"
        ],
        "mounts": [
            {
                "fstype": "xfs",
                "path": "/opt/test1",
                "src": "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c",
                "state": "absent"
            },
            {
                "path": "/opt/test1",
                "state": "absent"
            }
        ],
        "packages": [
            "xfsprogs",
            "lvm2"
        ],
        "pools": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/foo-test1",
                        "_kernel_device": "/dev/dm-0",
                        "_mount_id": "/dev/mapper/foo-test1",
                        "_raw_device": "/dev/mapper/foo-test1",
                        "_raw_kernel_device": "/dev/dm-0",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "unformatted",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": null,
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "5g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Saturday 02 November 2024  19:26:57 -0400 (0:00:00.084)       0:01:43.521 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/foo-test1",
                        "_kernel_device": "/dev/dm-0",
                        "_mount_id": "/dev/mapper/foo-test1",
                        "_raw_device": "/dev/mapper/foo-test1",
                        "_raw_kernel_device": "/dev/dm-0",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "unformatted",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": null,
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "5g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Saturday 02 November 2024  19:26:57 -0400 (0:00:00.042)       0:01:43.563 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Saturday 02 November 2024  19:26:57 -0400 (0:00:00.037)       0:01:43.601 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
changed: [managed-node2] => (item={'src': 'UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c', 'path': '/opt/test1', 'state': 'absent', 'fstype': 'xfs'}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "xfs",
    "mount_info": {
        "fstype": "xfs",
        "path": "/opt/test1",
        "src": "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c",
        "state": "absent"
    },
    "name": "/opt/test1",
    "opts": "defaults",
    "passno": "0",
    "src": "UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c"
}
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
ok: [managed-node2] => (item={'path': '/opt/test1', 'state': 'absent'}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": false,
    "dump": "0",
    "fstab": "/etc/fstab",
    "mount_info": {
        "path": "/opt/test1",
        "state": "absent"
    },
    "name": "/opt/test1",
    "opts": "defaults",
    "passno": "0"
}
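
Both loop items target the same mount point; with ansible.posix.mount, state: absent unmounts the path and deletes any matching /etc/fstab line, which is why the first item reports changed and the second finds nothing left to do. Each iteration reduces to roughly:

    - name: Remove an obsolete mount and its fstab entry (sketch)
      ansible.posix.mount:
        path: /opt/test1
        state: absent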

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Saturday 02 November 2024  19:26:58 -0400 (0:00:00.795)       0:01:44.397 ***** 
ok: [managed-node2] => {
    "changed": false,
    "name": null,
    "status": {}
}
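
Because the role edits /etc/fstab directly, systemd's generated mount units need a refresh; the null name in the output indicates the module was called for the reload alone, roughly:

    - name: Tell systemd to refresh its view of /etc/fstab (sketch)
      ansible.builtin.systemd_service:
        daemon_reload: true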

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Saturday 02 November 2024  19:26:58 -0400 (0:00:00.752)       0:01:45.149 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Saturday 02 November 2024  19:26:59 -0400 (0:00:00.049)       0:01:45.198 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Saturday 02 November 2024  19:26:59 -0400 (0:00:00.049)       0:01:45.248 ***** 
ok: [managed-node2] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Saturday 02 November 2024  19:26:59 -0400 (0:00:00.752)       0:01:46.001 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730589811.9926362,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1730589808.2226133,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 675283161,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1730589808.2243166,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "2749971072",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Saturday 02 November 2024  19:27:00 -0400 (0:00:00.396)       0:01:46.398 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Saturday 02 November 2024  19:27:00 -0400 (0:00:00.023)       0:01:46.421 ***** 
ok: [managed-node2]

TASK [Verify role results] *****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_fs.yml:112
Saturday 02 November 2024  19:27:01 -0400 (0:00:00.977)       0:01:47.398 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node2

TASK [Print out pool information] **********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Saturday 02 November 2024  19:27:01 -0400 (0:00:00.079)       0:01:47.478 ***** 
ok: [managed-node2] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/foo-test1",
                    "_kernel_device": "/dev/dm-0",
                    "_mount_id": "/dev/mapper/foo-test1",
                    "_raw_device": "/dev/mapper/foo-test1",
                    "_raw_kernel_device": "/dev/dm-0",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "unformatted",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": null,
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "5g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        }
    ]
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Saturday 02 November 2024  19:27:01 -0400 (0:00:00.061)       0:01:47.539 ***** 
skipping: [managed-node2] => {
    "false_condition": "_storage_volumes_list | length > 0"
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Saturday 02 November 2024  19:27:01 -0400 (0:00:00.047)       0:01:47.587 ***** 
ok: [managed-node2] => {
    "changed": false,
    "info": {
        "/dev/mapper/foo-test1": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/foo-test1",
            "size": "5G",
            "type": "lvm",
            "uuid": ""
        },
        "/dev/sda": {
            "fstype": "LVM2_member",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": "xOHC09-8dIW-nh4C-0hDX-JdU0-78e6-Sh8o2o"
        },
        "/dev/sdb": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda1",
            "size": "1M",
            "type": "partition",
            "uuid": ""
        },
        "/dev/xvda2": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda2",
            "size": "250G",
            "type": "partition",
            "uuid": "70689318-97f1-4727-890d-5d50652a95d6"
        }
    }
}
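
The info mapping mirrors what lsblk can emit as key="value" pairs; note that fstype and uuid are now empty for /dev/mapper/foo-test1, confirming the signature removal. A rough equivalent of the collection step (the test uses its own helper module, so this is only an approximation):

    - name: Collect info about the volumes (approximation)
      ansible.builtin.command:
        argv: [lsblk, -p, -P, -o, "NAME,FSTYPE,LABEL,UUID,SIZE,TYPE,MOUNTPOINT"]
      register: storage_test_lsblk  # assumed register name
      changed_when: false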

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Saturday 02 November 2024  19:27:01 -0400 (0:00:00.418)       0:01:48.006 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.002979",
    "end": "2024-11-02 19:27:02.185710",
    "rc": 0,
    "start": "2024-11-02 19:27:02.182731"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Wed Oct 30 09:04:14 2024
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=70689318-97f1-4727-890d-5d50652a95d6 /                       xfs     defaults        0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
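
Note the # system_role:storage fingerprint at the top (added by the role) and that the UUID=c454eb62-a0eb-4a79-89bf-0d471375a06c line for /opt/test1 is gone. A small follow-up check in the same spirit (grep exits 1 on no match, which is the outcome asserted here):

    - name: Verify the volume is absent from /etc/fstab (sketch)
      ansible.builtin.command:
        argv: [grep, /opt/test1, /etc/fstab]
      register: fstab_hits  # assumed register name
      changed_when: false
      failed_when: fstab_hits.rc != 1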

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Saturday 02 November 2024  19:27:02 -0400 (0:00:00.411)       0:01:48.417 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.003006",
    "end": "2024-11-02 19:27:02.561725",
    "failed_when_result": false,
    "rc": 0,
    "start": "2024-11-02 19:27:02.558719"
}

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Saturday 02 November 2024  19:27:02 -0400 (0:00:00.393)       0:01:48.810 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node2 => (item={'disks': ['sda'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'lvm', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'unformatted', 'mount_options': 'defaults', 'mount_point': None, 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '5g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': '/dev/mapper/foo-test1', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'}]})

TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Saturday 02 November 2024  19:27:02 -0400 (0:00:00.146)       0:01:48.957 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Saturday 02 November 2024  19:27:02 -0400 (0:00:00.035)       0:01:48.993 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "vgs",
        "--noheadings",
        "--binary",
        "-o",
        "shared",
        "foo"
    ],
    "delta": "0:00:00.028116",
    "end": "2024-11-02 19:27:03.242548",
    "rc": 0,
    "start": "2024-11-02 19:27:03.214432"
}

STDOUT:

        0
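
vgs --binary -o shared prints 1 for a shared VG and 0 otherwise, padded with whitespace, hence a trim before comparison. Roughly (register name assumed):

    - name: Verify that VG shared value checks out (sketch)
      ansible.builtin.assert:
        that:
          - vgs_shared.stdout | trim == '0'
        fail_msg: VG foo is unexpectedly marked shared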

TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Saturday 02 November 2024  19:27:03 -0400 (0:00:00.485)       0:01:49.479 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify pool subset] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34
Saturday 02 November 2024  19:27:03 -0400 (0:00:00.045)       0:01:49.524 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node2 => (item=members)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node2 => (item=volumes)

TASK [Set test variables] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2
Saturday 02 November 2024  19:27:03 -0400 (0:00:00.075)       0:01:49.600 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_pv_count": "1",
        "_storage_test_pool_pvs_lvm": [
            "/dev/sda"
        ]
    },
    "changed": false
}

TASK [Get the canonical device path for each member device] ********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8
Saturday 02 November 2024  19:27:03 -0400 (0:00:00.080)       0:01:49.681 ***** 
ok: [managed-node2] => (item=/dev/sda) => {
    "ansible_loop_var": "pv",
    "changed": false,
    "device": "/dev/sda",
    "pv": "/dev/sda"
}
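
Canonicalizing each PV path guards against pool members being specified as symlinks (for example /dev/disk/by-id names); here /dev/sda is already canonical. A plain-command stand-in for the helper the test uses:

    - name: Get the canonical device path for each member device (stand-in)
      ansible.builtin.command:
        argv: [readlink, -f, "{{ pv }}"]
      loop: "{{ _storage_test_pool_pvs_lvm }}"
      loop_control:
        loop_var: pv
      register: pv_paths  # assumed register name
      changed_when: false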

TASK [Set pvs lvm length] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17
Saturday 02 November 2024  19:27:03 -0400 (0:00:00.402)       0:01:50.084 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "__pvs_lvm_len": "1"
    },
    "changed": false
}

TASK [Set pool pvs] ************************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22
Saturday 02 November 2024  19:27:03 -0400 (0:00:00.056)       0:01:50.141 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_pool_pvs": [
            "/dev/sda"
        ]
    },
    "changed": false
}

TASK [Verify PV count] *********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27
Saturday 02 November 2024  19:27:04 -0400 (0:00:00.070)       0:01:50.211 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed
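
The count check compares the two facts set in the preceding tasks. A minimal sketch under that assumption; the exact expressions are not shown in this log:

    - name: Set pvs lvm length
      ansible.builtin.set_fact:
        __pvs_lvm_len: "{{ _storage_test_pool_pvs_lvm | length }}"

    - name: Verify PV count
      ansible.builtin.assert:
        that:
          - __pvs_lvm_len | int == _storage_test_expected_pv_count | int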

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36
Saturday 02 November 2024  19:27:04 -0400 (0:00:00.054)       0:01:50.266 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_pv_type": "disk"
    },
    "changed": false
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41
Saturday 02 November 2024  19:27:04 -0400 (0:00:00.028)       0:01:50.294 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_pv_type": "disk"
    },
    "changed": false
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46
Saturday 02 November 2024  19:27:04 -0400 (0:00:00.047)       0:01:50.342 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level",
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:51
Saturday 02 November 2024  19:27:04 -0400 (0:00:00.027)       0:01:50.370 ***** 
ok: [managed-node2] => (item=/dev/sda) => {
    "ansible_loop_var": "pv",
    "changed": false,
    "pv": "/dev/sda"
}

MSG:

All assertions passed

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:64
Saturday 02 November 2024  19:27:04 -0400 (0:00:00.044)       0:01:50.414 ***** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0
}

STDOUT:

True

TASK [Verify that PVs fill the whole devices when they should] *****************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:73
Saturday 02 November 2024  19:27:04 -0400 (0:00:00.416)       0:01:50.831 ***** 
skipping: [managed-node2] => (item=/dev/sda)  => {
    "ansible_loop_var": "st_pool_pv",
    "changed": false,
    "false_condition": "storage_test_pool.grow_to_fill | bool",
    "skip_reason": "Conditional result was False",
    "st_pool_pv": "/dev/sda"
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped
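
Every item is skipped because this pool was created with grow_to_fill: false. Judging by the per-item condition in the log, the task is a guarded loop along these lines; the command and its fields are assumptions (pv_size and dev_size are standard pvs output fields):

    - name: Verify that PVs fill the whole devices when they should
      ansible.builtin.command: pvs --noheadings --units b -o pv_size,dev_size {{ st_pool_pv }}
      loop: "{{ _storage_test_pool_pvs }}"
      loop_control:
        loop_var: st_pool_pv
      when: storage_test_pool.grow_to_fill | bool
      changed_when: false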

TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:83
Saturday 02 November 2024  19:27:04 -0400 (0:00:00.058)       0:01:50.890 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node2

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Saturday 02 November 2024  19:27:04 -0400 (0:00:00.074)       0:01:50.964 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Saturday 02 November 2024  19:27:04 -0400 (0:00:00.024)       0:01:50.989 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Saturday 02 November 2024  19:27:04 -0400 (0:00:00.028)       0:01:51.018 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Saturday 02 November 2024  19:27:04 -0400 (0:00:00.036)       0:01:51.054 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Saturday 02 November 2024  19:27:04 -0400 (0:00:00.035)       0:01:51.090 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Saturday 02 November 2024  19:27:04 -0400 (0:00:00.038)       0:01:51.128 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Saturday 02 November 2024  19:27:04 -0400 (0:00:00.033)       0:01:51.162 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55
Saturday 02 November 2024  19:27:05 -0400 (0:00:00.032)       0:01:51.195 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64
Saturday 02 November 2024  19:27:05 -0400 (0:00:00.063)       0:01:51.258 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74
Saturday 02 November 2024  19:27:05 -0400 (0:00:00.042)       0:01:51.301 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83
Saturday 02 November 2024  19:27:05 -0400 (0:00:00.028)       0:01:51.329 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}

TASK [Check LVM RAID] **********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:86
Saturday 02 November 2024  19:27:05 -0400 (0:00:00.030)       0:01:51.359 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node2

TASK [Validate pool member LVM RAID settings] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Saturday 02 November 2024  19:27:05 -0400 (0:00:00.067)       0:01:51.427 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'unformatted', 'mount_options': 'defaults', 'mount_point': None, 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '5g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': '/dev/mapper/foo-test1', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'})

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:8
Saturday 02 November 2024  19:27:05 -0400 (0:00:00.068)       0:01:51.495 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:16
Saturday 02 November 2024  19:27:05 -0400 (0:00:00.032)       0:01:51.528 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:20
Saturday 02 November 2024  19:27:05 -0400 (0:00:00.030)       0:01:51.559 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV stripe size] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:27
Saturday 02 November 2024  19:27:05 -0400 (0:00:00.028)       0:01:51.587 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested stripe size] *****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:31
Saturday 02 November 2024  19:27:05 -0400 (0:00:00.029)       0:01:51.616 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected stripe size] ************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:37
Saturday 02 November 2024  19:27:05 -0400 (0:00:00.068)       0:01:51.684 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Check stripe size] *******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:42
Saturday 02 November 2024  19:27:05 -0400 (0:00:00.028)       0:01:51.713 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Check Thin Pools] ********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:89
Saturday 02 November 2024  19:27:05 -0400 (0:00:00.025)       0:01:51.739 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node2

TASK [Validate pool member thinpool settings] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2
Saturday 02 November 2024  19:27:05 -0400 (0:00:00.044)       0:01:51.783 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'unformatted', 'mount_options': 'defaults', 'mount_point': None, 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '5g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': '/dev/mapper/foo-test1', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'})

TASK [Get information about thinpool] ******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:8
Saturday 02 November 2024  19:27:05 -0400 (0:00:00.041)       0:01:51.825 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_thin_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Check that volume is in correct thinpool (when thinp name is provided)] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:16
Saturday 02 November 2024  19:27:05 -0400 (0:00:00.028)       0:01:51.853 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_thin_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Check that volume is in thinpool (when thinp name is not provided)] ******
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:22
Saturday 02 November 2024  19:27:05 -0400 (0:00:00.023)       0:01:51.876 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_thin_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:26
Saturday 02 November 2024  19:27:05 -0400 (0:00:00.029)       0:01:51.906 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_thin_status": null
    },
    "changed": false
}

TASK [Check member encryption] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:92
Saturday 02 November 2024  19:27:05 -0400 (0:00:00.049)       0:01:51.956 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node2

TASK [Set test variables] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Saturday 02 November 2024  19:27:05 -0400 (0:00:00.098)       0:01:52.054 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Saturday 02 November 2024  19:27:05 -0400 (0:00:00.057)       0:01:52.112 ***** 
skipping: [managed-node2] => (item=/dev/sda)  => {
    "_storage_test_pool_member_path": "/dev/sda",
    "ansible_loop_var": "_storage_test_pool_member_path",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Saturday 02 November 2024  19:27:05 -0400 (0:00:00.038)       0:01:52.150 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml for managed-node2 => (item=/dev/sda)

TASK [Set variables used by tests] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:2
Saturday 02 November 2024  19:27:06 -0400 (0:00:00.045)       0:01:52.195 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": []
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:6
Saturday 02 November 2024  19:27:06 -0400 (0:00:00.048)       0:01:52.244 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed
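
With encryption disabled, _storage_test_crypttab_entries is empty and the expected count is "0", so the member passes trivially. The check presumably reduces to a single assertion of this shape; the exact expression is an assumption:

    - name: Check for /etc/crypttab entry
      ansible.builtin.assert:
        that:
          - _storage_test_crypttab_entries | length == _storage_test_expected_crypttab_entries | int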

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:14
Saturday 02 November 2024  19:27:06 -0400 (0:00:00.060)       0:01:52.304 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:23
Saturday 02 November 2024  19:27:06 -0400 (0:00:00.041)       0:01:52.345 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "false and _storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:32
Saturday 02 November 2024  19:27:06 -0400 (0:00:00.057)       0:01:52.403 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:41
Saturday 02 November 2024  19:27:06 -0400 (0:00:00.132)       0:01:52.535 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null
    },
    "changed": false
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Saturday 02 November 2024  19:27:06 -0400 (0:00:00.055)       0:01:52.591 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:95
Saturday 02 November 2024  19:27:06 -0400 (0:00:00.041)       0:01:52.634 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node2

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Saturday 02 November 2024  19:27:06 -0400 (0:00:00.091)       0:01:52.726 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'unformatted', 'mount_options': 'defaults', 'mount_point': None, 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '5g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': '/dev/mapper/foo-test1', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'})

TASK [Get information about VDO deduplication] *********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:8
Saturday 02 November 2024  19:27:06 -0400 (0:00:00.071)       0:01:52.797 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO deduplication is off] ***************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:15
Saturday 02 November 2024  19:27:06 -0400 (0:00:00.035)       0:01:52.832 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO deduplication is on] ****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:21
Saturday 02 November 2024  19:27:06 -0400 (0:00:00.035)       0:01:52.868 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about VDO compression] ***********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:27
Saturday 02 November 2024  19:27:06 -0400 (0:00:00.023)       0:01:52.891 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO compression is off] *****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:34
Saturday 02 November 2024  19:27:06 -0400 (0:00:00.023)       0:01:52.915 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO compression is on] ******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:40
Saturday 02 November 2024  19:27:06 -0400 (0:00:00.031)       0:01:52.946 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:46
Saturday 02 November 2024  19:27:06 -0400 (0:00:00.029)       0:01:52.976 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_vdo_status": null
    },
    "changed": false
}

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:98
Saturday 02 November 2024  19:27:06 -0400 (0:00:00.040)       0:01:53.016 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node2

TASK [Run 'stratis report'] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Saturday 02 November 2024  19:27:06 -0400 (0:00:00.069)       0:01:53.085 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:11
Saturday 02 November 2024  19:27:06 -0400 (0:00:00.024)       0:01:53.110 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the pool was created] ****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Saturday 02 November 2024  19:27:06 -0400 (0:00:00.023)       0:01:53.133 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:25
Saturday 02 November 2024  19:27:06 -0400 (0:00:00.025)       0:01:53.159 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:34
Saturday 02 November 2024  19:27:07 -0400 (0:00:00.021)       0:01:53.180 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:44
Saturday 02 November 2024  19:27:07 -0400 (0:00:00.022)       0:01:53.203 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:101
Saturday 02 November 2024  19:27:07 -0400 (0:00:00.026)       0:01:53.229 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Saturday 02 November 2024  19:27:07 -0400 (0:00:00.025)       0:01:53.255 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'unformatted', 'mount_options': 'defaults', 'mount_point': None, 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '5g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': '/dev/mapper/foo-test1', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'})

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Saturday 02 November 2024  19:27:07 -0400 (0:00:00.062)       0:01:53.317 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_volume_present": true,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Saturday 02 November 2024  19:27:07 -0400 (0:00:00.068)       0:01:53.386 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node2 => (item=mount)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node2 => (item=fstab)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node2 => (item=fs)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node2 => (item=device)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node2 => (item=encryption)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node2 => (item=md)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node2 => (item=size)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node2 => (item=cache)
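
As with the pool subsets, each item maps onto a test-verify-volume-<subset>.yml file. A plausible sketch of the dispatcher; the loop variable name matches the unrendered title of the task above:

    - name: Run test verify for {{ storage_test_volume_subset }}
      ansible.builtin.include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
      loop: "{{ _storage_volume_tests }}"
      loop_control:
        loop_var: storage_test_volume_subset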

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Saturday 02 November 2024  19:27:07 -0400 (0:00:00.187)       0:01:53.573 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/mapper/foo-test1"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Saturday 02 November 2024  19:27:07 -0400 (0:00:00.041)       0:01:53.615 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Saturday 02 November 2024  19:27:07 -0400 (0:00:00.058)       0:01:53.673 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Saturday 02 November 2024  19:27:07 -0400 (0:00:00.026)       0:01:53.699 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed
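
Because this volume has no mount point, the assertion expects zero entries for the device in the gathered mount facts. A minimal sketch under that assumption; the exact expression is not shown in this log:

    - name: Verify the current mount state by device
      ansible.builtin.assert:
        that:
          - >-
            ansible_facts.mounts | selectattr('device', 'equalto', storage_test_device_path)
            | list | length == (1 if storage_test_volume.mount_point else 0)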

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Saturday 02 November 2024  19:27:07 -0400 (0:00:00.030)       0:01:53.730 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Saturday 02 November 2024  19:27:07 -0400 (0:00:00.021)       0:01:53.752 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Saturday 02 November 2024  19:27:07 -0400 (0:00:00.019)       0:01:53.772 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode",
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Saturday 02 November 2024  19:27:07 -0400 (0:00:00.020)       0:01:53.792 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Saturday 02 November 2024  19:27:07 -0400 (0:00:00.022)       0:01:53.815 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Saturday 02 November 2024  19:27:07 -0400 (0:00:00.064)       0:01:53.879 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Saturday 02 November 2024  19:27:07 -0400 (0:00:00.022)       0:01:53.901 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Saturday 02 November 2024  19:27:07 -0400 (0:00:00.022)       0:01:53.924 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "0",
        "storage_test_fstab_expected_mount_options_matches": "1",
        "storage_test_fstab_expected_mount_point_matches": "0",
        "storage_test_fstab_id_matches": [],
        "storage_test_fstab_mount_options_matches": [],
        "storage_test_fstab_mount_point_matches": []
    },
    "changed": false
}

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Saturday 02 November 2024  19:27:07 -0400 (0:00:00.083)       0:01:54.008 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed
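
The match lists set two tasks earlier were built by filtering /etc/fstab, and with no mount point the expected ID match count is "0". A sketch of the ID-match path; the storage_test_fstab register feeding it is an assumption:

    - name: Set some variables for fstab checking
      ansible.builtin.set_fact:
        storage_test_fstab_id_matches: "{{ storage_test_fstab.stdout_lines
          | select('search', storage_test_volume._mount_id) | list }}"

    - name: Verify that the device identifier appears in /etc/fstab
      ansible.builtin.assert:
        that:
          - storage_test_fstab_id_matches | length == storage_test_fstab_expected_id_matches | int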

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Saturday 02 November 2024  19:27:07 -0400 (0:00:00.057)       0:01:54.066 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Saturday 02 November 2024  19:27:07 -0400 (0:00:00.057)       0:01:54.123 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__storage_verify_mount_options | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Saturday 02 November 2024  19:27:08 -0400 (0:00:00.053)       0:01:54.176 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:51
Saturday 02 November 2024  19:27:08 -0400 (0:00:00.028)       0:01:54.205 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Saturday 02 November 2024  19:27:08 -0400 (0:00:00.030)       0:01:54.236 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Saturday 02 November 2024  19:27:08 -0400 (0:00:00.070)       0:01:54.306 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Saturday 02 November 2024  19:27:08 -0400 (0:00:00.083)       0:01:54.390 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730590016.2328696,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "ctime": 1730590016.2328696,
        "dev": 6,
        "device_type": 64768,
        "executable": false,
        "exists": true,
        "gid": 6,
        "gr_name": "disk",
        "inode": 5141,
        "isblk": true,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": false,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/symlink",
        "mode": "0660",
        "mtime": 1730590016.2328696,
        "nlink": 1,
        "path": "/dev/mapper/foo-test1",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": null,
        "wgrp": true,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Saturday 02 November 2024  19:27:08 -0400 (0:00:00.438)       0:01:54.829 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed
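
The stat result above feeds the presence assertion. A minimal sketch; the register name and the guard condition are assumptions inferred from the skip reason of the companion task below:

    - name: See whether the device node is present
      ansible.builtin.stat:
        path: "{{ storage_test_volume._device }}"
      register: storage_test_dev  # hypothetical register name

    - name: Verify the presence/absence of the device node
      ansible.builtin.assert:
        that:
          - storage_test_dev.stat.exists == _storage_test_volume_present
      when: _storage_test_volume_present or storage_test_volume.type == 'disk'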

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Saturday 02 November 2024  19:27:08 -0400 (0:00:00.034)       0:01:54.864 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Saturday 02 November 2024  19:27:08 -0400 (0:00:00.023)       0:01:54.887 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Saturday 02 November 2024  19:27:08 -0400 (0:00:00.040)       0:01:54.928 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "st_volume_type": "lvm"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Saturday 02 November 2024  19:27:08 -0400 (0:00:00.037)       0:01:54.965 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"raid\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Saturday 02 November 2024  19:27:08 -0400 (0:00:00.032)       0:01:54.997 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Saturday 02 November 2024  19:27:08 -0400 (0:00:00.038)       0:01:55.036 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Saturday 02 November 2024  19:27:08 -0400 (0:00:00.025)       0:01:55.061 ***** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: cryptsetup
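
The "Nothing to do" result with rc 0 is the package module reporting that everything listed is already installed. The task presumably amounts to:

    - name: Ensure cryptsetup is present
      ansible.builtin.package:
        name: cryptsetup
        state: present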

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Saturday 02 November 2024  19:27:09 -0400 (0:00:00.723)       0:01:55.785 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Saturday 02 November 2024  19:27:09 -0400 (0:00:00.035)       0:01:55.820 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Saturday 02 November 2024  19:27:09 -0400 (0:00:00.033)       0:01:55.854 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed
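
For an unencrypted volume the role records the same path in _device and _raw_device, so the check reduces to a comparison of the two; a sketch, with the exact expression assumed:

    - name: Verify that the raw device is the same as the device if not encrypted
      ansible.builtin.assert:
        that:
          - storage_test_volume._raw_device == storage_test_volume._device
      when: not storage_test_volume.encryption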

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Saturday 02 November 2024  19:27:09 -0400 (0:00:00.084)       0:01:55.938 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Saturday 02 November 2024  19:27:09 -0400 (0:00:00.055)       0:01:55.994 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Saturday 02 November 2024  19:27:09 -0400 (0:00:00.064)       0:01:56.059 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Saturday 02 November 2024  19:27:09 -0400 (0:00:00.067)       0:01:56.126 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Saturday 02 November 2024  19:27:10 -0400 (0:00:00.041)       0:01:56.167 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Saturday 02 November 2024  19:27:10 -0400 (0:00:00.059)       0:01:56.227 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Saturday 02 November 2024  19:27:10 -0400 (0:00:00.082)       0:01:56.309 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Saturday 02 November 2024  19:27:10 -0400 (0:00:00.086)       0:01:56.396 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Saturday 02 November 2024  19:27:10 -0400 (0:00:00.079)       0:01:56.475 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Saturday 02 November 2024  19:27:10 -0400 (0:00:00.089)       0:01:56.564 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Saturday 02 November 2024  19:27:10 -0400 (0:00:00.065)       0:01:56.630 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Saturday 02 November 2024  19:27:10 -0400 (0:00:00.060)       0:01:56.690 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Saturday 02 November 2024  19:27:10 -0400 (0:00:00.044)       0:01:56.735 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Saturday 02 November 2024  19:27:10 -0400 (0:00:00.044)       0:01:56.779 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Saturday 02 November 2024  19:27:10 -0400 (0:00:00.068)       0:01:56.847 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Saturday 02 November 2024  19:27:10 -0400 (0:00:00.064)       0:01:56.911 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Saturday 02 November 2024  19:27:10 -0400 (0:00:00.061)       0:01:56.973 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Saturday 02 November 2024  19:27:10 -0400 (0:00:00.045)       0:01:57.019 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Saturday 02 November 2024  19:27:10 -0400 (0:00:00.036)       0:01:57.055 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Saturday 02 November 2024  19:27:10 -0400 (0:00:00.037)       0:01:57.093 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Saturday 02 November 2024  19:27:10 -0400 (0:00:00.047)       0:01:57.140 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Saturday 02 November 2024  19:27:11 -0400 (0:00:00.042)       0:01:57.183 ***** 
ok: [managed-node2] => {
    "bytes": 5368709120,
    "changed": false,
    "lvm": "5g",
    "parted": "5GiB",
    "size": "5 GiB"
}

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Saturday 02 November 2024  19:27:11 -0400 (0:00:00.420)       0:01:57.604 ***** 
ok: [managed-node2] => {
    "bytes": 5368709120,
    "changed": false,
    "lvm": "5g",
    "parted": "5GiB",
    "size": "5 GiB"
}
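
Both parse steps resolve the requested "5g" to 5368709120 bytes (5 × 2^30). Ansible's built-in human_to_bytes filter performs the same conversion; a quick, hypothetical illustration (not part of the test file):

    - name: Convert a human-readable size to bytes
      ansible.builtin.debug:
        msg: "{{ '5 GiB' | human_to_bytes }}"  # prints 5368709120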

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Saturday 02 November 2024  19:27:11 -0400 (0:00:00.407)       0:01:58.012 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_expected_size": "5368709120"
    },
    "changed": false
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Saturday 02 November 2024  19:27:11 -0400 (0:00:00.052)       0:01:58.064 ***** 
ok: [managed-node2] => {
    "storage_test_expected_size": "5368709120"
}

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Saturday 02 November 2024  19:27:11 -0400 (0:00:00.029)       0:01:58.094 ***** 
ok: [managed-node2] => {
    "bytes": 10726680821,
    "changed": false,
    "lvm": "9g",
    "parted": "9GiB",
    "size": "9 GiB"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Saturday 02 November 2024  19:27:12 -0400 (0:00:00.401)       0:01:58.495 ***** 
skipping: [managed-node2] => {
    "false_condition": "'%' in storage_test_volume.size | string"
}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Saturday 02 November 2024  19:27:12 -0400 (0:00:00.049)       0:01:58.545 ***** 
skipping: [managed-node2] => {
    "false_condition": "'%' in storage_test_volume.size | string"
}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Saturday 02 November 2024  19:27:12 -0400 (0:00:00.067)       0:01:58.613 ***** 
skipping: [managed-node2] => {
    "false_condition": "'%' in storage_test_volume.size | string"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Saturday 02 November 2024  19:27:12 -0400 (0:00:00.046)       0:01:58.659 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "'%' in storage_test_volume.size | string",
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Saturday 02 November 2024  19:27:12 -0400 (0:00:00.042)       0:01:58.702 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Saturday 02 November 2024  19:27:12 -0400 (0:00:00.020)       0:01:58.722 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Saturday 02 November 2024  19:27:12 -0400 (0:00:00.023)       0:01:58.745 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Saturday 02 November 2024  19:27:12 -0400 (0:00:00.020)       0:01:58.766 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Saturday 02 November 2024  19:27:12 -0400 (0:00:00.020)       0:01:58.786 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Saturday 02 November 2024  19:27:12 -0400 (0:00:00.020)       0:01:58.807 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Saturday 02 November 2024  19:27:12 -0400 (0:00:00.022)       0:01:58.829 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Saturday 02 November 2024  19:27:12 -0400 (0:00:00.024)       0:01:58.854 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Saturday 02 November 2024  19:27:12 -0400 (0:00:00.032)       0:01:58.887 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Saturday 02 November 2024  19:27:12 -0400 (0:00:00.030)       0:01:58.917 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Saturday 02 November 2024  19:27:12 -0400 (0:00:00.029)       0:01:58.947 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Saturday 02 November 2024  19:27:12 -0400 (0:00:00.029)       0:01:58.976 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Saturday 02 November 2024  19:27:12 -0400 (0:00:00.028)       0:01:59.004 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Saturday 02 November 2024  19:27:12 -0400 (0:00:00.030)       0:01:59.035 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Saturday 02 November 2024  19:27:12 -0400 (0:00:00.031)       0:01:59.067 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Saturday 02 November 2024  19:27:12 -0400 (0:00:00.029)       0:01:59.096 ***** 
ok: [managed-node2] => {
    "storage_test_actual_size": {
        "bytes": 5368709120,
        "changed": false,
        "failed": false,
        "lvm": "5g",
        "parted": "5GiB",
        "size": "5 GiB"
    }
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Saturday 02 November 2024  19:27:12 -0400 (0:00:00.036)       0:01:59.132 ***** 
ok: [managed-node2] => {
    "storage_test_expected_size": "5368709120"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Saturday 02 November 2024  19:27:13 -0400 (0:00:00.035)       0:01:59.167 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed
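
The comparison that just passed boils down to matching the parsed byte counts. A sketch of such a check (the actual test may allow a small rounding tolerance rather than strict equality):

    - name: Assert expected size is actual size
      ansible.builtin.assert:
        that:
          # compare byte counts, not the human-readable strings
          - storage_test_actual_size.bytes == storage_test_expected_size | int
        msg: >-
          Actual size {{ storage_test_actual_size.bytes }} does not match
          expected size {{ storage_test_expected_size }}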

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Saturday 02 November 2024  19:27:13 -0400 (0:00:00.070)       0:01:59.238 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "lvs",
        "--noheadings",
        "--nameprefixes",
        "--units=b",
        "--nosuffix",
        "--unquoted",
        "-o",
        "name,attr,cache_total_blocks,chunk_size,segtype",
        "foo/test1"
    ],
    "delta": "0:00:00.024019",
    "end": "2024-11-02 19:27:13.416642",
    "rc": 0,
    "start": "2024-11-02 19:27:13.392623"
}

STDOUT:

  LVM2_LV_NAME=test1 LVM2_LV_ATTR=-wi-a----- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear
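
The --nameprefixes output is deliberately easy to mine with a regex, which is how the next task can turn it into a fact. A sketch of that extraction, with lvs_output standing in for whatever register name the test actually uses:

    - name: Set LV segment type
      ansible.builtin.set_fact:
        # regex_findall returns the capture groups as a list, e.g. ['linear']
        storage_test_lv_segtype: "{{ lvs_output.stdout | regex_findall('LVM2_SEGTYPE=(\\S+)') }}"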

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Saturday 02 November 2024  19:27:13 -0400 (0:00:00.408)       0:01:59.647 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_lv_segtype": [
            "linear"
        ]
    },
    "changed": false
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Saturday 02 November 2024  19:27:13 -0400 (0:00:00.065)       0:01:59.713 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Saturday 02 November 2024  19:27:13 -0400 (0:00:00.072)       0:01:59.785 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Saturday 02 November 2024  19:27:13 -0400 (0:00:00.078)       0:01:59.863 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Saturday 02 November 2024  19:27:13 -0400 (0:00:00.059)       0:01:59.923 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Saturday 02 November 2024  19:27:13 -0400 (0:00:00.074)       0:01:59.998 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Saturday 02 November 2024  19:27:13 -0400 (0:00:00.134)       0:02:00.132 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Saturday 02 November 2024  19:27:14 -0400 (0:00:00.098)       0:02:00.231 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Saturday 02 November 2024  19:27:14 -0400 (0:00:00.032)       0:02:00.263 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Clean up] ****************************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_fs.yml:116
Saturday 02 November 2024  19:27:14 -0400 (0:00:00.038)       0:02:00.301 ***** 
included: fedora.linux_system_roles.storage for managed-node2

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Saturday 02 November 2024  19:27:14 -0400 (0:00:00.134)       0:02:00.436 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Saturday 02 November 2024  19:27:14 -0400 (0:00:00.054)       0:02:00.490 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Saturday 02 November 2024  19:27:14 -0400 (0:00:00.069)       0:02:00.559 ***** 
skipping: [managed-node2] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node2] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}
ok: [managed-node2] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Saturday 02 November 2024  19:27:14 -0400 (0:00:00.086)       0:02:00.646 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Saturday 02 November 2024  19:27:14 -0400 (0:00:00.051)       0:02:00.697 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Saturday 02 November 2024  19:27:14 -0400 (0:00:00.059)       0:02:00.757 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Saturday 02 November 2024  19:27:14 -0400 (0:00:00.037)       0:02:00.794 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Saturday 02 November 2024  19:27:14 -0400 (0:00:00.035)       0:02:00.830 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Saturday 02 November 2024  19:27:14 -0400 (0:00:00.082)       0:02:00.912 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Saturday 02 November 2024  19:27:14 -0400 (0:00:00.040)       0:02:00.952 ***** 
ok: [managed-node2] => {
    "storage_pools": [
        {
            "disks": [
                "sda"
            ],
            "name": "foo",
            "state": "absent",
            "volumes": [
                {
                    "fs_type": "xfs",
                    "mount_point": "/opt/test1",
                    "name": "test1",
                    "size": "5g"
                }
            ]
        }
    ]
}
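
This is the cleanup pass: the same pool as before, now requested with state: absent. As passed to the role in a playbook, the variable would look roughly like this (the role's defaults fill in the many null keys echoed elsewhere in this log):

    - name: Clean up
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            disks:
              - sda
            state: absent
            volumes:
              - name: test1
                size: 5g
                fs_type: xfs
                mount_point: /opt/test1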

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Saturday 02 November 2024  19:27:14 -0400 (0:00:00.057)       0:02:01.009 ***** 
ok: [managed-node2] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Saturday 02 November 2024  19:27:14 -0400 (0:00:00.057)       0:02:01.066 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Saturday 02 November 2024  19:27:14 -0400 (0:00:00.065)       0:02:01.132 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Saturday 02 November 2024  19:27:15 -0400 (0:00:00.086)       0:02:01.218 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Saturday 02 November 2024  19:27:15 -0400 (0:00:00.079)       0:02:01.298 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Saturday 02 November 2024  19:27:15 -0400 (0:00:00.080)       0:02:01.379 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Saturday 02 November 2024  19:27:15 -0400 (0:00:00.135)       0:02:01.515 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Saturday 02 November 2024  19:27:15 -0400 (0:00:00.035)       0:02:01.551 ***** 
changed: [managed-node2] => {
    "actions": [
        {
            "action": "destroy device",
            "device": "/dev/mapper/foo-test1",
            "fs_type": null
        },
        {
            "action": "destroy device",
            "device": "/dev/foo",
            "fs_type": null
        },
        {
            "action": "destroy format",
            "device": "/dev/sda",
            "fs_type": "lvmpv"
        }
    ],
    "changed": true,
    "crypts": [],
    "leaves": [
        "/dev/sda",
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1",
        "/dev/xvda2"
    ],
    "mounts": [],
    "packages": [
        "xfsprogs"
    ],
    "pools": [
        {
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "absent",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/foo-test1",
                    "_mount_id": "/dev/mapper/foo-test1",
                    "_raw_device": "/dev/mapper/foo-test1",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "5g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        }
    ],
    "volumes": []
}
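
Note the teardown order in actions: the logical volume (/dev/mapper/foo-test1) is destroyed first, then the volume group (/dev/foo), and finally the LVM PV format on /dev/sda, i.e. each device goes before the format beneath it. A hypothetical follow-up check on the registered result could pin that ordering down:

    - name: Verify the teardown actions
      ansible.builtin.assert:
        that:
          # three destroy actions, ordered LV -> VG -> PV format
          - blivet_output.actions | length == 3
          - blivet_output.actions[0].device == '/dev/mapper/foo-test1'
          - blivet_output.actions[1].device == '/dev/foo'
          - blivet_output.actions[2].fs_type == 'lvmpv'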

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Saturday 02 November 2024  19:27:17 -0400 (0:00:02.138)       0:02:03.690 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Saturday 02 November 2024  19:27:17 -0400 (0:00:00.083)       0:02:03.773 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730590018.1728811,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "218269ed484ab71984ca70eb56e2318b37e9204e",
        "ctime": 1730590017.8208792,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 541065418,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1730590017.8208792,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1366,
        "uid": 0,
        "version": "269688944",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Saturday 02 November 2024  19:27:18 -0400 (0:00:00.462)       0:02:04.235 ***** 
ok: [managed-node2] => {
    "backup": "",
    "changed": false
}
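
The fingerprint is the "# system_role:storage" marker visible in the fstab dump later in this run; an ok (unchanged) result means it was already in place. A minimal sketch of such a task, assuming lineinfile and a prior stat registered as fstab_stat (the role's actual implementation may differ):

    - name: Add fingerprint to /etc/fstab if present
      ansible.builtin.lineinfile:
        path: /etc/fstab
        line: "# system_role:storage"
        insertbefore: BOF
      when: fstab_stat.stat.exists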

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Saturday 02 November 2024  19:27:18 -0400 (0:00:00.437)       0:02:04.673 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Saturday 02 November 2024  19:27:18 -0400 (0:00:00.033)       0:02:04.706 ***** 
ok: [managed-node2] => {
    "blivet_output": {
        "actions": [
            {
                "action": "destroy device",
                "device": "/dev/mapper/foo-test1",
                "fs_type": null
            },
            {
                "action": "destroy device",
                "device": "/dev/foo",
                "fs_type": null
            },
            {
                "action": "destroy format",
                "device": "/dev/sda",
                "fs_type": "lvmpv"
            }
        ],
        "changed": true,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/sda",
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1",
            "/dev/xvda2"
        ],
        "mounts": [],
        "packages": [
            "xfsprogs"
        ],
        "pools": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "absent",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/foo-test1",
                        "_mount_id": "/dev/mapper/foo-test1",
                        "_raw_device": "/dev/mapper/foo-test1",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "5g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Saturday 02 November 2024  19:27:18 -0400 (0:00:00.046)       0:02:04.752 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "absent",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/foo-test1",
                        "_mount_id": "/dev/mapper/foo-test1",
                        "_raw_device": "/dev/mapper/foo-test1",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "5g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Saturday 02 November 2024  19:27:18 -0400 (0:00:00.049)       0:02:04.801 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Saturday 02 November 2024  19:27:18 -0400 (0:00:00.043)       0:02:04.844 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Saturday 02 November 2024  19:27:18 -0400 (0:00:00.065)       0:02:04.910 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "blivet_output['mounts']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Saturday 02 November 2024  19:27:18 -0400 (0:00:00.037)       0:02:04.948 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Saturday 02 November 2024  19:27:18 -0400 (0:00:00.070)       0:02:05.019 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Saturday 02 November 2024  19:27:18 -0400 (0:00:00.067)       0:02:05.086 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "blivet_output['mounts']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Saturday 02 November 2024  19:27:18 -0400 (0:00:00.038)       0:02:05.125 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1730589811.9926362,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1730589808.2226133,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 675283161,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1730589808.2243166,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "2749971072",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Saturday 02 November 2024  19:27:19 -0400 (0:00:00.492)       0:02:05.618 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Saturday 02 November 2024  19:27:19 -0400 (0:00:00.063)       0:02:05.682 ***** 
ok: [managed-node2]

TASK [Verify role results] *****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_fs.yml:130
Saturday 02 November 2024  19:27:20 -0400 (0:00:01.033)       0:02:06.715 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node2

TASK [Print out pool information] **********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Saturday 02 November 2024  19:27:20 -0400 (0:00:00.101)       0:02:06.817 ***** 
ok: [managed-node2] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "absent",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/foo-test1",
                    "_mount_id": "/dev/mapper/foo-test1",
                    "_raw_device": "/dev/mapper/foo-test1",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "5g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        }
    ]
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Saturday 02 November 2024  19:27:20 -0400 (0:00:00.073)       0:02:06.891 ***** 
skipping: [managed-node2] => {
    "false_condition": "_storage_volumes_list | length > 0"
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Saturday 02 November 2024  19:27:20 -0400 (0:00:00.065)       0:02:06.956 ***** 
ok: [managed-node2] => {
    "changed": false,
    "info": {
        "/dev/sda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdb": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda1",
            "size": "1M",
            "type": "partition",
            "uuid": ""
        },
        "/dev/xvda2": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda2",
            "size": "250G",
            "type": "partition",
            "uuid": "70689318-97f1-4727-890d-5d50652a95d6"
        }
    }
}
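
The per-device map above (fstype, label, mountpoint, size, type, uuid) is the kind
of information lsblk can report. The log does not show how the test gathered it,
but a rough stand-alone equivalent (task name kept from the log; the register
variable is illustrative, not from this run) could be:

    - name: Collect info about the volumes
      ansible.builtin.command:
        cmd: lsblk --json -o NAME,FSTYPE,LABEL,MOUNTPOINT,SIZE,TYPE,UUID
      register: storage_test_lsblk   # hypothetical variable name
      changed_when: false            # read-only query, never reports a change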

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Saturday 02 November 2024  19:27:21 -0400 (0:00:00.473)       0:02:07.429 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.003067",
    "end": "2024-11-02 19:27:21.602729",
    "rc": 0,
    "start": "2024-11-02 19:27:21.599662"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Wed Oct 30 09:04:14 2024
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=70689318-97f1-4727-890d-5d50652a95d6 /                       xfs     defaults        0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
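
The '# system_role:storage' line at the top appears to be the role's marker for
fstab content it manages; the NFS entries below it predate the role and are left
untouched. The read itself is just the 'cat /etc/fstab' shown in the task result;
a minimal sketch of such a task (the register name is an assumption) would be:

    - name: Read the /etc/fstab file for volume existence
      ansible.builtin.command:
        cmd: cat /etc/fstab
      register: storage_test_fstab   # hypothetical variable name
      changed_when: false            # reading the file changes nothing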

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Saturday 02 November 2024  19:27:21 -0400 (0:00:00.438)       0:02:07.867 ***** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.003032",
    "end": "2024-11-02 19:27:22.104129",
    "failed_when_result": false,
    "rc": 0,
    "start": "2024-11-02 19:27:22.101097"
}
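
Note the "failed_when_result": false in the result: unlike the fstab read, this
task tolerates a missing /etc/crypttab. A minimal sketch (register name is an
assumption):

    - name: Read the /etc/crypttab file
      ansible.builtin.command:
        cmd: cat /etc/crypttab
      register: storage_test_crypttab   # hypothetical variable name
      changed_when: false
      failed_when: false                # an absent crypttab is not an error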

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Saturday 02 November 2024  19:27:22 -0400 (0:00:00.471)       0:02:08.339 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node2 => (item={'disks': ['sda'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'absent', 'type': 'lvm', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '5g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': '/dev/mapper/foo-test1'}]})
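
The item dict above (echoed again for each per-volume include further down) is the
fully defaulted form of the pool spec under test. Stripped of null defaults, it
corresponds to a storage_pools entry like this (partial rendering for readability;
every value is taken from the dict above):

    storage_pools:
      - name: foo
        type: lvm
        disks:
          - sda
        state: absent
        volumes:
          - name: test1
            type: lvm
            size: 5g
            fs_type: xfs
            mount_point: /opt/test1
            mount_options: defaults
            state: present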

TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Saturday 02 November 2024  19:27:22 -0400 (0:00:00.111)       0:02:08.450 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Saturday 02 November 2024  19:27:22 -0400 (0:00:00.059)       0:02:08.510 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Saturday 02 November 2024  19:27:22 -0400 (0:00:00.035)       0:02:08.546 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify pool subset] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34
Saturday 02 November 2024  19:27:22 -0400 (0:00:00.032)       0:02:08.578 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node2 => (item=members)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node2 => (item=volumes)

TASK [Set test variables] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2
Saturday 02 November 2024  19:27:22 -0400 (0:00:00.089)       0:02:08.668 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_pv_count": "0",
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Get the canonical device path for each member device] ********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8
Saturday 02 November 2024  19:27:22 -0400 (0:00:00.177)       0:02:08.845 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Set pvs lvm length] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17
Saturday 02 November 2024  19:27:22 -0400 (0:00:00.147)       0:02:08.992 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "__pvs_lvm_len": "0"
    },
    "changed": false
}

TASK [Set pool pvs] ************************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22
Saturday 02 November 2024  19:27:22 -0400 (0:00:00.084)       0:02:09.077 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_pool_pvs": []
    },
    "changed": false
}

TASK [Verify PV count] *********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27
Saturday 02 November 2024  19:27:23 -0400 (0:00:00.103)       0:02:09.181 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed
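
Since the pool is absent, the facts set above are zero/empty, so the count check
passes trivially. The assertion presumably compares them along these lines (a
sketch, not the actual task source):

    - name: Verify PV count
      ansible.builtin.assert:
        that:
          - _storage_test_pool_pvs_lvm | length == _storage_test_expected_pv_count | int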

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36
Saturday 02 November 2024  19:27:23 -0400 (0:00:00.127)       0:02:09.308 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_pv_type": "disk"
    },
    "changed": false
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41
Saturday 02 November 2024  19:27:23 -0400 (0:00:00.053)       0:02:09.362 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_pv_type": "disk"
    },
    "changed": false
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46
Saturday 02 November 2024  19:27:23 -0400 (0:00:00.085)       0:02:09.447 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level",
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:51
Saturday 02 November 2024  19:27:23 -0400 (0:00:00.042)       0:02:09.490 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:64
Saturday 02 November 2024  19:27:23 -0400 (0:00:00.032)       0:02:09.523 ***** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0
}

STDOUT:

True

TASK [Verify that PVs fill the whole devices when they should] *****************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:73
Saturday 02 November 2024  19:27:23 -0400 (0:00:00.446)       0:02:09.969 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:83
Saturday 02 November 2024  19:27:23 -0400 (0:00:00.096)       0:02:10.066 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node2

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Saturday 02 November 2024  19:27:24 -0400 (0:00:00.119)       0:02:10.186 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Saturday 02 November 2024  19:27:24 -0400 (0:00:00.059)       0:02:10.246 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Saturday 02 November 2024  19:27:24 -0400 (0:00:00.052)       0:02:10.298 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Saturday 02 November 2024  19:27:24 -0400 (0:00:00.056)       0:02:10.354 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Saturday 02 November 2024  19:27:24 -0400 (0:00:00.067)       0:02:10.422 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Saturday 02 November 2024  19:27:24 -0400 (0:00:00.048)       0:02:10.471 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Saturday 02 November 2024  19:27:24 -0400 (0:00:00.059)       0:02:10.530 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55
Saturday 02 November 2024  19:27:24 -0400 (0:00:00.032)       0:02:10.563 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64
Saturday 02 November 2024  19:27:24 -0400 (0:00:00.031)       0:02:10.595 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74
Saturday 02 November 2024  19:27:24 -0400 (0:00:00.041)       0:02:10.637 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83
Saturday 02 November 2024  19:27:24 -0400 (0:00:00.061)       0:02:10.699 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}

TASK [Check LVM RAID] **********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:86
Saturday 02 November 2024  19:27:24 -0400 (0:00:00.053)       0:02:10.752 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node2

TASK [Validate pool member LVM RAID settings] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Saturday 02 November 2024  19:27:24 -0400 (0:00:00.091)       0:02:10.844 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '5g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': '/dev/mapper/foo-test1'})

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:8
Saturday 02 November 2024  19:27:24 -0400 (0:00:00.098)       0:02:10.943 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:16
Saturday 02 November 2024  19:27:24 -0400 (0:00:00.049)       0:02:10.992 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:20
Saturday 02 November 2024  19:27:24 -0400 (0:00:00.053)       0:02:11.045 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV stripe size] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:27
Saturday 02 November 2024  19:27:24 -0400 (0:00:00.046)       0:02:11.092 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested stripe size] *****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:31
Saturday 02 November 2024  19:27:24 -0400 (0:00:00.064)       0:02:11.157 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected stripe size] ************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:37
Saturday 02 November 2024  19:27:25 -0400 (0:00:00.068)       0:02:11.226 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Check stripe size] *******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:42
Saturday 02 November 2024  19:27:25 -0400 (0:00:00.073)       0:02:11.299 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Check Thin Pools] ********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:89
Saturday 02 November 2024  19:27:25 -0400 (0:00:00.049)       0:02:11.348 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node2

TASK [Validate pool member thinpool settings] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2
Saturday 02 November 2024  19:27:25 -0400 (0:00:00.134)       0:02:11.483 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '5g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': '/dev/mapper/foo-test1'})

TASK [Get information about thinpool] ******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:8
Saturday 02 November 2024  19:27:25 -0400 (0:00:00.082)       0:02:11.566 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_thin_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Check that volume is in correct thinpool (when thinp name is provided)] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:16
Saturday 02 November 2024  19:27:25 -0400 (0:00:00.059)       0:02:11.626 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_thin_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Check that volume is in thinpool (when thinp name is not provided)] ******
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:22
Saturday 02 November 2024  19:27:25 -0400 (0:00:00.060)       0:02:11.687 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_thin_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:26
Saturday 02 November 2024  19:27:25 -0400 (0:00:00.069)       0:02:11.756 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_thin_status": null
    },
    "changed": false
}

TASK [Check member encryption] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:92
Saturday 02 November 2024  19:27:25 -0400 (0:00:00.061)       0:02:11.817 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node2

TASK [Set test variables] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Saturday 02 November 2024  19:27:25 -0400 (0:00:00.120)       0:02:11.938 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Saturday 02 November 2024  19:27:25 -0400 (0:00:00.079)       0:02:12.017 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Saturday 02 November 2024  19:27:25 -0400 (0:00:00.045)       0:02:12.063 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Saturday 02 November 2024  19:27:25 -0400 (0:00:00.035)       0:02:12.098 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:95
Saturday 02 November 2024  19:27:25 -0400 (0:00:00.034)       0:02:12.133 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node2

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Saturday 02 November 2024  19:27:26 -0400 (0:00:00.097)       0:02:12.230 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '5g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': '/dev/mapper/foo-test1'})

TASK [Get information about VDO deduplication] *********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:8
Saturday 02 November 2024  19:27:26 -0400 (0:00:00.087)       0:02:12.317 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO deduplication is off] ***************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:15
Saturday 02 November 2024  19:27:26 -0400 (0:00:00.037)       0:02:12.355 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO deduplication is on] ****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:21
Saturday 02 November 2024  19:27:26 -0400 (0:00:00.033)       0:02:12.389 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about VDO compression] ***********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:27
Saturday 02 November 2024  19:27:26 -0400 (0:00:00.053)       0:02:12.442 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO compression is off] *****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:34
Saturday 02 November 2024  19:27:26 -0400 (0:00:00.040)       0:02:12.483 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO compression is on] ******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:40
Saturday 02 November 2024  19:27:26 -0400 (0:00:00.070)       0:02:12.553 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:46
Saturday 02 November 2024  19:27:26 -0400 (0:00:00.063)       0:02:12.616 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_vdo_status": null
    },
    "changed": false
}

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:98
Saturday 02 November 2024  19:27:26 -0400 (0:00:00.070)       0:02:12.687 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node2

TASK [Run 'stratis report'] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Saturday 02 November 2024  19:27:26 -0400 (0:00:00.175)       0:02:12.863 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:11
Saturday 02 November 2024  19:27:26 -0400 (0:00:00.154)       0:02:13.018 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the pool was created] ****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Saturday 02 November 2024  19:27:26 -0400 (0:00:00.046)       0:02:13.065 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:25
Saturday 02 November 2024  19:27:26 -0400 (0:00:00.032)       0:02:13.097 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:34
Saturday 02 November 2024  19:27:26 -0400 (0:00:00.048)       0:02:13.146 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:44
Saturday 02 November 2024  19:27:27 -0400 (0:00:00.041)       0:02:13.187 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:101
Saturday 02 November 2024  19:27:27 -0400 (0:00:00.034)       0:02:13.222 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Saturday 02 November 2024  19:27:27 -0400 (0:00:00.038)       0:02:13.261 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '5g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': '/dev/mapper/foo-test1'})

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Saturday 02 November 2024  19:27:27 -0400 (0:00:00.057)       0:02:13.319 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_volume_present": false,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Saturday 02 November 2024  19:27:27 -0400 (0:00:00.124)       0:02:13.444 ***** 
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node2 => (item=mount)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node2 => (item=fstab)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node2 => (item=fs)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node2 => (item=device)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node2 => (item=encryption)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node2 => (item=md)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node2 => (item=size)
included: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node2 => (item=cache)
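
Each entry in _storage_volume_tests maps to a test-verify-volume-<subset>.yml
include, which is also why the task name above still shows the raw
{{ storage_test_volume_subset }} template. A sketch of the dispatch task
(assuming include_tasks with a custom loop_var; the actual task file is not
shown in this log):

    - name: Run test verify for {{ storage_test_volume_subset }}
      ansible.builtin.include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
      loop: "{{ _storage_volume_tests }}"
      loop_control:
        loop_var: storage_test_volume_subset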

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Saturday 02 November 2024  19:27:27 -0400 (0:00:00.361)       0:02:13.805 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/mapper/foo-test1"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Saturday 02 November 2024  19:27:27 -0400 (0:00:00.050)       0:02:13.855 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "/opt/test1",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Saturday 02 November 2024  19:27:27 -0400 (0:00:00.076)       0:02:13.932 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Saturday 02 November 2024  19:27:27 -0400 (0:00:00.035)       0:02:13.967 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Saturday 02 November 2024  19:27:27 -0400 (0:00:00.056)       0:02:14.024 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Saturday 02 November 2024  19:27:27 -0400 (0:00:00.057)       0:02:14.081 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Saturday 02 November 2024  19:27:27 -0400 (0:00:00.056)       0:02:14.137 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode",
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Saturday 02 November 2024  19:27:28 -0400 (0:00:00.034)       0:02:14.171 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Saturday 02 November 2024  19:27:28 -0400 (0:00:00.035)       0:02:14.206 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Saturday 02 November 2024  19:27:28 -0400 (0:00:00.037)       0:02:14.245 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Saturday 02 November 2024  19:27:28 -0400 (0:00:00.059)       0:02:14.305 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Saturday 02 November 2024  19:27:28 -0400 (0:00:00.065)       0:02:14.370 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "0",
        "storage_test_fstab_expected_mount_options_matches": "0",
        "storage_test_fstab_expected_mount_point_matches": "0",
        "storage_test_fstab_id_matches": [],
        "storage_test_fstab_mount_options_matches": [],
        "storage_test_fstab_mount_point_matches": []
    },
    "changed": false
}

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Saturday 02 November 2024  19:27:28 -0400 (0:00:00.158)       0:02:14.529 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Saturday 02 November 2024  19:27:28 -0400 (0:00:00.092)       0:02:14.621 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed
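
With the volume absent, storage_test_fstab_expected_mount_point_matches is "0"
and the matches list is empty, so the check passes. A sketch of the comparison
(not the actual task source):

    - name: Verify the fstab mount point
      ansible.builtin.assert:
        that:
          - storage_test_fstab_mount_point_matches | length == storage_test_fstab_expected_mount_point_matches | int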

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Saturday 02 November 2024  19:27:28 -0400 (0:00:00.151)       0:02:14.772 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__storage_verify_mount_options | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Saturday 02 November 2024  19:27:28 -0400 (0:00:00.065)       0:02:14.838 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:51
Saturday 02 November 2024  19:27:28 -0400 (0:00:00.044)       0:02:14.883 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Saturday 02 November 2024  19:27:28 -0400 (0:00:00.062)       0:02:14.946 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Saturday 02 November 2024  19:27:28 -0400 (0:00:00.053)       0:02:14.999 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Saturday 02 November 2024  19:27:28 -0400 (0:00:00.063)       0:02:15.063 ***** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Saturday 02 November 2024  19:27:29 -0400 (0:00:00.495)       0:02:15.558 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present or storage_test_volume.type == 'disk'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Saturday 02 November 2024  19:27:29 -0400 (0:00:00.042)       0:02:15.600 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Saturday 02 November 2024  19:27:29 -0400 (0:00:00.047)       0:02:15.648 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Saturday 02 November 2024  19:27:29 -0400 (0:00:00.042)       0:02:15.690 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "st_volume_type": "lvm"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Saturday 02 November 2024  19:27:29 -0400 (0:00:00.044)       0:02:15.735 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"raid\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Saturday 02 November 2024  19:27:29 -0400 (0:00:00.044)       0:02:15.779 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Saturday 02 November 2024  19:27:29 -0400 (0:00:00.044)       0:02:15.824 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Saturday 02 November 2024  19:27:29 -0400 (0:00:00.035)       0:02:15.859 ***** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: cryptsetup
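
The "lsrpackages: cryptsetup" marker suggests a test-support package wrapper;
functionally this step is a package install that was already satisfied
("Nothing to do"). A plain-module sketch of the same step:

    - name: Ensure cryptsetup is present
      ansible.builtin.package:
        name: cryptsetup
        state: present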

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Saturday 02 November 2024  19:27:30 -0400 (0:00:00.791)       0:02:16.651 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Saturday 02 November 2024  19:27:30 -0400 (0:00:00.032)       0:02:16.684 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Saturday 02 November 2024  19:27:30 -0400 (0:00:00.030)       0:02:16.714 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Saturday 02 November 2024  19:27:30 -0400 (0:00:00.029)       0:02:16.744 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Saturday 02 November 2024  19:27:30 -0400 (0:00:00.034)       0:02:16.778 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Saturday 02 November 2024  19:27:30 -0400 (0:00:00.035)       0:02:16.813 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Saturday 02 November 2024  19:27:30 -0400 (0:00:00.053)       0:02:16.866 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Saturday 02 November 2024  19:27:30 -0400 (0:00:00.039)       0:02:16.906 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Saturday 02 November 2024  19:27:30 -0400 (0:00:00.034)       0:02:16.940 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Saturday 02 November 2024  19:27:30 -0400 (0:00:00.098)       0:02:17.039 ***** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Saturday 02 November 2024  19:27:31 -0400 (0:00:00.144)       0:02:17.183 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Saturday 02 November 2024  19:27:31 -0400 (0:00:00.103)       0:02:17.287 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Saturday 02 November 2024  19:27:31 -0400 (0:00:00.099)       0:02:17.386 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Saturday 02 November 2024  19:27:31 -0400 (0:00:00.072)       0:02:17.458 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Saturday 02 November 2024  19:27:31 -0400 (0:00:00.046)       0:02:17.505 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Saturday 02 November 2024  19:27:31 -0400 (0:00:00.036)       0:02:17.542 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Saturday 02 November 2024  19:27:31 -0400 (0:00:00.035)       0:02:17.577 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Saturday 02 November 2024  19:27:31 -0400 (0:00:00.032)       0:02:17.610 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Saturday 02 November 2024  19:27:31 -0400 (0:00:00.031)       0:02:17.642 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Saturday 02 November 2024  19:27:31 -0400 (0:00:00.031)       0:02:17.674 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Saturday 02 November 2024  19:27:31 -0400 (0:00:00.031)       0:02:17.705 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Saturday 02 November 2024  19:27:31 -0400 (0:00:00.032)       0:02:17.738 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Saturday 02 November 2024  19:27:31 -0400 (0:00:00.042)       0:02:17.780 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Saturday 02 November 2024  19:27:31 -0400 (0:00:00.043)       0:02:17.823 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Saturday 02 November 2024  19:27:31 -0400 (0:00:00.032)       0:02:17.855 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Saturday 02 November 2024  19:27:31 -0400 (0:00:00.116)       0:02:17.972 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Saturday 02 November 2024  19:27:31 -0400 (0:00:00.079)       0:02:18.052 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Saturday 02 November 2024  19:27:31 -0400 (0:00:00.085)       0:02:18.138 ***** 
ok: [managed-node2] => {
    "storage_test_expected_size": "5368709120"
}

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Saturday 02 November 2024  19:27:32 -0400 (0:00:00.044)       0:02:18.183 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Saturday 02 November 2024  19:27:32 -0400 (0:00:00.120)       0:02:18.303 ***** 
skipping: [managed-node2] => {
    "false_condition": "_storage_test_volume_present | bool"
}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Saturday 02 November 2024  19:27:32 -0400 (0:00:00.099)       0:02:18.402 ***** 
skipping: [managed-node2] => {
    "false_condition": "_storage_test_volume_present | bool"
}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Saturday 02 November 2024  19:27:32 -0400 (0:00:00.070)       0:02:18.473 ***** 
skipping: [managed-node2] => {
    "false_condition": "_storage_test_volume_present | bool"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Saturday 02 November 2024  19:27:32 -0400 (0:00:00.112)       0:02:18.586 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Saturday 02 November 2024  19:27:32 -0400 (0:00:00.115)       0:02:18.701 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Saturday 02 November 2024  19:27:32 -0400 (0:00:00.116)       0:02:18.817 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Saturday 02 November 2024  19:27:32 -0400 (0:00:00.037)       0:02:18.855 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Saturday 02 November 2024  19:27:32 -0400 (0:00:00.038)       0:02:18.893 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Saturday 02 November 2024  19:27:32 -0400 (0:00:00.034)       0:02:18.927 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Saturday 02 November 2024  19:27:32 -0400 (0:00:00.034)       0:02:18.962 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Saturday 02 November 2024  19:27:32 -0400 (0:00:00.070)       0:02:19.033 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Saturday 02 November 2024  19:27:32 -0400 (0:00:00.070)       0:02:19.103 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Saturday 02 November 2024  19:27:32 -0400 (0:00:00.035)       0:02:19.139 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Saturday 02 November 2024  19:27:33 -0400 (0:00:00.034)       0:02:19.173 ***** 
skipping: [managed-node2] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Saturday 02 November 2024  19:27:33 -0400 (0:00:00.038)       0:02:19.212 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Saturday 02 November 2024  19:27:33 -0400 (0:00:00.036)       0:02:19.248 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Saturday 02 November 2024  19:27:33 -0400 (0:00:00.038)       0:02:19.286 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Saturday 02 November 2024  19:27:33 -0400 (0:00:00.041)       0:02:19.328 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Saturday 02 November 2024  19:27:33 -0400 (0:00:00.032)       0:02:19.361 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Saturday 02 November 2024  19:27:33 -0400 (0:00:00.034)       0:02:19.396 ***** 
ok: [managed-node2] => {
    "storage_test_actual_size": {
        "changed": false,
        "false_condition": "_storage_test_volume_present | bool",
        "skip_reason": "Conditional result was False",
        "skipped": true
    }
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Saturday 02 November 2024  19:27:33 -0400 (0:00:00.040)       0:02:19.436 ***** 
ok: [managed-node2] => {
    "storage_test_expected_size": "5368709120"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Saturday 02 November 2024  19:27:33 -0400 (0:00:00.040)       0:02:19.476 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Saturday 02 November 2024  19:27:33 -0400 (0:00:00.064)       0:02:19.540 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Saturday 02 November 2024  19:27:33 -0400 (0:00:00.042)       0:02:19.583 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Saturday 02 November 2024  19:27:33 -0400 (0:00:00.042)       0:02:19.626 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Saturday 02 November 2024  19:27:33 -0400 (0:00:00.036)       0:02:19.663 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Saturday 02 November 2024  19:27:33 -0400 (0:00:00.038)       0:02:19.701 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Saturday 02 November 2024  19:27:33 -0400 (0:00:00.034)       0:02:19.736 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Saturday 02 November 2024  19:27:33 -0400 (0:00:00.035)       0:02:19.771 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Saturday 02 November 2024  19:27:33 -0400 (0:00:00.032)       0:02:19.804 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Saturday 02 November 2024  19:27:33 -0400 (0:00:00.027)       0:02:19.831 ***** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Saturday 02 November 2024  19:27:33 -0400 (0:00:00.023)       0:02:19.855 ***** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

PLAY RECAP *********************************************************************
managed-node2              : ok=626  changed=5    unreachable=0    failed=0    skipped=681  rescued=0    ignored=0   

Saturday 02 November 2024  19:27:33 -0400 (0:00:00.053)       0:02:19.908 ***** 
=============================================================================== 
fedora.linux_system_roles.storage : Get service facts ------------------- 2.23s
/tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 2.18s
/tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 2.14s
/tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Update facts ------------------------ 2.06s
/tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222 
fedora.linux_system_roles.storage : Update facts ------------------------ 1.99s
/tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 1.91s
/tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 1.78s
/tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 1.74s
/tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 1.70s
/tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
Gathering Facts --------------------------------------------------------- 1.43s
/tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_fs.yml:2 
Read the /etc/fstab file for volume existence --------------------------- 1.41s
/tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20 
Read the /etc/fstab file for volume existence --------------------------- 1.39s
/tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20 
fedora.linux_system_roles.storage : Update facts ------------------------ 1.09s
/tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222 
fedora.linux_system_roles.storage : Update facts ------------------------ 1.03s
/tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222 
fedora.linux_system_roles.storage : Update facts ------------------------ 1.02s
/tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222 
fedora.linux_system_roles.storage : Update facts ------------------------ 0.99s
/tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222 
fedora.linux_system_roles.storage : Update facts ------------------------ 0.98s
/tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222 
fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab --- 0.97s
/tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159 
fedora.linux_system_roles.storage : Make sure required packages are installed --- 0.92s
/tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37 
fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab --- 0.87s
/tmp/collections-S9Z/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187