ansible-playbook 2.9.27
  config file = None
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/local/lib/python3.9/site-packages/ansible
  executable location = /usr/local/bin/ansible-playbook
  python version = 3.9.19 (main, May 16 2024, 11:40:09) [GCC 8.5.0 20210514 (Red Hat 8.5.0-22)]
No config file found; using defaults
[WARNING]: running playbook inside collection fedora.linux_system_roles
Skipping callback 'actionable', as we already have a stdout callback.
Skipping callback 'counter_enabled', as we already have a stdout callback.
Skipping callback 'debug', as we already have a stdout callback.
Skipping callback 'dense', as we already have a stdout callback.
Skipping callback 'dense', as we already have a stdout callback.
Skipping callback 'full_skip', as we already have a stdout callback.
Skipping callback 'json', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'null', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.
Skipping callback 'selective', as we already have a stdout callback.
Skipping callback 'skippy', as we already have a stdout callback.
Skipping callback 'stderr', as we already have a stdout callback.
Skipping callback 'unixy', as we already have a stdout callback.
Skipping callback 'yaml', as we already have a stdout callback.

PLAYBOOK: tests_mount.yml ******************************************************
1 plays in /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml

PLAY [Basic mount snapshot test] ***********************************************

TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:2
Saturday 29 March 2025  18:24:46 -0400 (0:00:00.021)       0:00:00.021 ******** 
ok: [managed-node3]
META: ran handlers

TASK [Setup] *******************************************************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:46
Saturday 29 March 2025  18:24:47 -0400 (0:00:01.036)       0:00:01.057 ******** 
included: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/tasks/setup.yml for managed-node3

TASK [Check if system is ostree] ***********************************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/tasks/setup.yml:10
Saturday 29 March 2025  18:24:47 -0400 (0:00:00.040)       0:00:01.097 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [Set mount parent] ********************************************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/tasks/setup.yml:15
Saturday 29 March 2025  18:24:48 -0400 (0:00:00.507)       0:00:01.605 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "test_mnt_parent": "/mnt"
    },
    "changed": false
}

TASK [Run the storage role install base packages] ******************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/tasks/setup.yml:19
Saturday 29 March 2025  18:24:48 -0400 (0:00:00.040)       0:00:01.646 ******** 

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Saturday 29 March 2025  18:24:48 -0400 (0:00:00.049)       0:00:01.695 ******** 
included: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Saturday 29 March 2025  18:24:48 -0400 (0:00:00.027)       0:00:01.723 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Saturday 29 March 2025  18:24:48 -0400 (0:00:00.043)       0:00:01.766 ******** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=CentOS_8.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_8.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_8.yml"
}
ok: [managed-node3] => (item=CentOS_8.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_8.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_8.yml"
}

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Saturday 29 March 2025  18:24:48 -0400 (0:00:00.071)       0:00:01.837 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Saturday 29 March 2025  18:24:48 -0400 (0:00:00.361)       0:00:02.198 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__storage_is_ostree": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Saturday 29 March 2025  18:24:48 -0400 (0:00:00.042)       0:00:02.241 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Saturday 29 March 2025  18:24:48 -0400 (0:00:00.017)       0:00:02.258 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Saturday 29 March 2025  18:24:48 -0400 (0:00:00.016)       0:00:02.275 ******** 
included: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Saturday 29 March 2025  18:24:48 -0400 (0:00:00.068)       0:00:02.343 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: kmod-kvdo libblockdev libblockdev-crypto libblockdev-dm libblockdev-lvm libblockdev-mdraid libblockdev-swap python3-blivet stratis-cli stratisd vdo xfsprogs

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Saturday 29 March 2025  18:24:51 -0400 (0:00:03.060)       0:00:05.404 ******** 
ok: [managed-node3] => {
    "storage_pools": "VARIABLE IS NOT DEFINED!: 'storage_pools' is undefined"
}

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Saturday 29 March 2025  18:24:52 -0400 (0:00:00.041)       0:00:05.445 ******** 
ok: [managed-node3] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Saturday 29 March 2025  18:24:52 -0400 (0:00:00.040)       0:00:05.485 ******** 
ok: [managed-node3] => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [],
    "mounts": [],
    "packages": [],
    "pools": [],
    "volumes": []
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32
Saturday 29 March 2025  18:24:52 -0400 (0:00:00.892)       0:00:06.378 ******** 
included: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Check if the COPR support packages should be installed] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:2
Saturday 29 March 2025  18:24:53 -0400 (0:00:00.142)       0:00:06.521 ******** 

TASK [fedora.linux_system_roles.storage : Make sure COPR support packages are present] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:13
Saturday 29 March 2025  18:24:53 -0400 (0:00:00.026)       0:00:06.547 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable COPRs] ************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:19
Saturday 29 March 2025  18:24:53 -0400 (0:00:00.026)       0:00:06.573 ******** 

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:38
Saturday 29 March 2025  18:24:53 -0400 (0:00:00.037)       0:00:06.611 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: kpartx

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:52
Saturday 29 March 2025  18:24:56 -0400 (0:00:02.931)       0:00:09.542 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "services": {
            "NetworkManager-dispatcher.service": {
                "name": "NetworkManager-dispatcher.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "NetworkManager-wait-online.service": {
                "name": "NetworkManager-wait-online.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "NetworkManager.service": {
                "name": "NetworkManager.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "auditd.service": {
                "name": "auditd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "auth-rpcgss-module.service": {
                "name": "auth-rpcgss-module.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "autovt@.service": {
                "name": "autovt@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "enabled"
            },
            "blivet.service": {
                "name": "blivet.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "blk-availability.service": {
                "name": "blk-availability.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "chrony-dnssrv@.service": {
                "name": "chrony-dnssrv@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "chrony-wait.service": {
                "name": "chrony-wait.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "chronyd.service": {
                "name": "chronyd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "cloud-config.service": {
                "name": "cloud-config.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-final.service": {
                "name": "cloud-final.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-init-hotplugd.service": {
                "name": "cloud-init-hotplugd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "cloud-init-local.service": {
                "name": "cloud-init-local.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-init.service": {
                "name": "cloud-init.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "console-getty.service": {
                "name": "console-getty.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "container-getty@.service": {
                "name": "container-getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "cpupower.service": {
                "name": "cpupower.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "crond.service": {
                "name": "crond.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "dbus-org.freedesktop.hostname1.service": {
                "name": "dbus-org.freedesktop.hostname1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "dbus-org.freedesktop.locale1.service": {
                "name": "dbus-org.freedesktop.locale1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "dbus-org.freedesktop.login1.service": {
                "name": "dbus-org.freedesktop.login1.service",
                "source": "systemd",
                "state": "active",
                "status": "static"
            },
            "dbus-org.freedesktop.nm-dispatcher.service": {
                "name": "dbus-org.freedesktop.nm-dispatcher.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "dbus-org.freedesktop.portable1.service": {
                "name": "dbus-org.freedesktop.portable1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "dbus-org.freedesktop.timedate1.service": {
                "name": "dbus-org.freedesktop.timedate1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "dbus.service": {
                "name": "dbus.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "debug-shell.service": {
                "name": "debug-shell.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "dm-event.service": {
                "name": "dm-event.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "dnf-makecache.service": {
                "name": "dnf-makecache.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dnf-system-upgrade-cleanup.service": {
                "name": "dnf-system-upgrade-cleanup.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "dnf-system-upgrade.service": {
                "name": "dnf-system-upgrade.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "dracut-cmdline.service": {
                "name": "dracut-cmdline.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-initqueue.service": {
                "name": "dracut-initqueue.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-mount.service": {
                "name": "dracut-mount.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-mount.service": {
                "name": "dracut-pre-mount.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-pivot.service": {
                "name": "dracut-pre-pivot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-trigger.service": {
                "name": "dracut-pre-trigger.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-udev.service": {
                "name": "dracut-pre-udev.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-shutdown-onfailure.service": {
                "name": "dracut-shutdown-onfailure.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-shutdown.service": {
                "name": "dracut-shutdown.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "ebtables.service": {
                "name": "ebtables.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "emergency.service": {
                "name": "emergency.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "firewalld.service": {
                "name": "firewalld.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "fstrim.service": {
                "name": "fstrim.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "getty@.service": {
                "name": "getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "enabled"
            },
            "getty@tty1.service": {
                "name": "getty@tty1.service",
                "source": "systemd",
                "state": "running",
                "status": "unknown"
            },
            "grub-boot-indeterminate.service": {
                "name": "grub-boot-indeterminate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "gssproxy.service": {
                "name": "gssproxy.service",
                "source": "systemd",
                "state": "running",
                "status": "disabled"
            },
            "halt-local.service": {
                "name": "halt-local.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "import-state.service": {
                "name": "import-state.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "initrd-cleanup.service": {
                "name": "initrd-cleanup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-parse-etc.service": {
                "name": "initrd-parse-etc.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-switch-root.service": {
                "name": "initrd-switch-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-udevadm-cleanup-db.service": {
                "name": "initrd-udevadm-cleanup-db.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "iprdump.service": {
                "name": "iprdump.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "iprinit.service": {
                "name": "iprinit.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "iprupdate.service": {
                "name": "iprupdate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "irqbalance.service": {
                "name": "irqbalance.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "kdump.service": {
                "name": "kdump.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "kmod-static-nodes.service": {
                "name": "kmod-static-nodes.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "kvm_stat.service": {
                "name": "kvm_stat.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "ldconfig.service": {
                "name": "ldconfig.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "loadmodules.service": {
                "name": "loadmodules.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "lvm2-lvmpolld.service": {
                "name": "lvm2-lvmpolld.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "lvm2-monitor.service": {
                "name": "lvm2-monitor.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "lvm2-pvscan@.service": {
                "name": "lvm2-pvscan@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "lvm2-pvscan@8:0.service": {
                "name": "lvm2-pvscan@8:0.service",
                "source": "systemd",
                "state": "stopped",
                "status": "unknown"
            },
            "lvm2-pvscan@8:16.service": {
                "name": "lvm2-pvscan@8:16.service",
                "source": "systemd",
                "state": "stopped",
                "status": "unknown"
            },
            "lvm2-pvscan@8:32.service": {
                "name": "lvm2-pvscan@8:32.service",
                "source": "systemd",
                "state": "stopped",
                "status": "unknown"
            },
            "man-db-cache-update.service": {
                "name": "man-db-cache-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "man-db-restart-cache-update.service": {
                "name": "man-db-restart-cache-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "mdadm-grow-continue@.service": {
                "name": "mdadm-grow-continue@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "mdadm-last-resort@.service": {
                "name": "mdadm-last-resort@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "mdcheck_continue.service": {
                "name": "mdcheck_continue.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "mdcheck_start.service": {
                "name": "mdcheck_start.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "mdmon@.service": {
                "name": "mdmon@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "mdmonitor-oneshot.service": {
                "name": "mdmonitor-oneshot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "mdmonitor.service": {
                "name": "mdmonitor.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "messagebus.service": {
                "name": "messagebus.service",
                "source": "systemd",
                "state": "active",
                "status": "static"
            },
            "microcode.service": {
                "name": "microcode.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "multipathd.service": {
                "name": "multipathd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "ndctl-monitor.service": {
                "name": "ndctl-monitor.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nfs-blkmap.service": {
                "name": "nfs-blkmap.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nfs-convert.service": {
                "name": "nfs-convert.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nfs-idmapd.service": {
                "name": "nfs-idmapd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfs-mountd.service": {
                "name": "nfs-mountd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfs-server.service": {
                "name": "nfs-server.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "nfs-utils.service": {
                "name": "nfs-utils.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfsdcld.service": {
                "name": "nfsdcld.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nftables.service": {
                "name": "nftables.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nis-domainname.service": {
                "name": "nis-domainname.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "oddjobd.service": {
                "name": "oddjobd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "plymouth-halt.service": {
                "name": "plymouth-halt.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "plymouth-kexec.service": {
                "name": "plymouth-kexec.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "plymouth-poweroff.service": {
                "name": "plymouth-poweroff.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "plymouth-quit-wait.service": {
                "name": "plymouth-quit-wait.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "plymouth-quit.service": {
                "name": "plymouth-quit.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "plymouth-read-write.service": {
                "name": "plymouth-read-write.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "plymouth-reboot.service": {
                "name": "plymouth-reboot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "plymouth-start.service": {
                "name": "plymouth-start.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "plymouth-switch-root-initramfs.service": {
                "name": "plymouth-switch-root-initramfs.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "plymouth-switch-root.service": {
                "name": "plymouth-switch-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "polkit.service": {
                "name": "polkit.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "qemu-guest-agent.service": {
                "name": "qemu-guest-agent.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "quotaon.service": {
                "name": "quotaon.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "rc-local.service": {
                "name": "rc-local.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rdisc.service": {
                "name": "rdisc.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "rescue.service": {
                "name": "rescue.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "restraintd.service": {
                "name": "restraintd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rngd.service": {
                "name": "rngd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rpc-gssd.service": {
                "name": "rpc-gssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-statd-notify.service": {
                "name": "rpc-statd-notify.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-statd.service": {
                "name": "rpc-statd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpcbind.service": {
                "name": "rpcbind.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rsyslog.service": {
                "name": "rsyslog.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "selinux-autorelabel-mark.service": {
                "name": "selinux-autorelabel-mark.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "selinux-autorelabel.service": {
                "name": "selinux-autorelabel.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "serial-getty@.service": {
                "name": "serial-getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "disabled"
            },
            "sshd-keygen@.service": {
                "name": "sshd-keygen@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "disabled"
            },
            "sshd-keygen@ecdsa.service": {
                "name": "sshd-keygen@ecdsa.service",
                "source": "systemd",
                "state": "stopped",
                "status": "unknown"
            },
            "sshd-keygen@ed25519.service": {
                "name": "sshd-keygen@ed25519.service",
                "source": "systemd",
                "state": "stopped",
                "status": "unknown"
            },
            "sshd-keygen@rsa.service": {
                "name": "sshd-keygen@rsa.service",
                "source": "systemd",
                "state": "stopped",
                "status": "unknown"
            },
            "sshd.service": {
                "name": "sshd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "sshd@.service": {
                "name": "sshd@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "sssd-autofs.service": {
                "name": "sssd-autofs.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-kcm.service": {
                "name": "sssd-kcm.service",
                "source": "systemd",
                "state": "stopped",
                "status": "indirect"
            },
            "sssd-nss.service": {
                "name": "sssd-nss.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-pac.service": {
                "name": "sssd-pac.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-pam.service": {
                "name": "sssd-pam.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-ssh.service": {
                "name": "sssd-ssh.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-sudo.service": {
                "name": "sssd-sudo.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd.service": {
                "name": "sssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "stratis-fstab-setup@.service": {
                "name": "stratis-fstab-setup@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "stratisd-min-postinitrd.service": {
                "name": "stratisd-min-postinitrd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "stratisd.service": {
                "name": "stratisd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "syslog.service": {
                "name": "syslog.service",
                "source": "systemd",
                "state": "active",
                "status": "enabled"
            },
            "system-update-cleanup.service": {
                "name": "system-update-cleanup.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-ask-password-console.service": {
                "name": "systemd-ask-password-console.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-ask-password-plymouth.service": {
                "name": "systemd-ask-password-plymouth.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-ask-password-wall.service": {
                "name": "systemd-ask-password-wall.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-backlight@.service": {
                "name": "systemd-backlight@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-binfmt.service": {
                "name": "systemd-binfmt.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-coredump@.service": {
                "name": "systemd-coredump@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-exit.service": {
                "name": "systemd-exit.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-firstboot.service": {
                "name": "systemd-firstboot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-fsck-root.service": {
                "name": "systemd-fsck-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-fsck@.service": {
                "name": "systemd-fsck@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-halt.service": {
                "name": "systemd-halt.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-hibernate-resume@.service": {
                "name": "systemd-hibernate-resume@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-hibernate.service": {
                "name": "systemd-hibernate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-hostnamed.service": {
                "name": "systemd-hostnamed.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-hwdb-update.service": {
                "name": "systemd-hwdb-update.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hybrid-sleep.service": {
                "name": "systemd-hybrid-sleep.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-initctl.service": {
                "name": "systemd-initctl.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journal-catalog-update.service": {
                "name": "systemd-journal-catalog-update.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journal-flush.service": {
                "name": "systemd-journal-flush.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journald.service": {
                "name": "systemd-journald.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-kexec.service": {
                "name": "systemd-kexec.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-localed.service": {
                "name": "systemd-localed.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-logind.service": {
                "name": "systemd-logind.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-machine-id-commit.service": {
                "name": "systemd-machine-id-commit.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-modules-load.service": {
                "name": "systemd-modules-load.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-portabled.service": {
                "name": "systemd-portabled.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-poweroff.service": {
                "name": "systemd-poweroff.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-pstore.service": {
                "name": "systemd-pstore.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-quotacheck.service": {
                "name": "systemd-quotacheck.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-random-seed.service": {
                "name": "systemd-random-seed.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-reboot.service": {
                "name": "systemd-reboot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-remount-fs.service": {
                "name": "systemd-remount-fs.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-resolved.service": {
                "name": "systemd-resolved.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-rfkill.service": {
                "name": "systemd-rfkill.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-suspend-then-hibernate.service": {
                "name": "systemd-suspend-then-hibernate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-suspend.service": {
                "name": "systemd-suspend.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-sysctl.service": {
                "name": "systemd-sysctl.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-sysusers.service": {
                "name": "systemd-sysusers.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-timedated.service": {
                "name": "systemd-timedated.service",
                "source": "systemd",
                "state": "inactive",
                "status": "masked"
            },
            "systemd-tmpfiles-clean.service": {
                "name": "systemd-tmpfiles-clean.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup-dev.service": {
                "name": "systemd-tmpfiles-setup-dev.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup.service": {
                "name": "systemd-tmpfiles-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udev-settle.service": {
                "name": "systemd-udev-settle.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udev-trigger.service": {
                "name": "systemd-udev-trigger.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udevd.service": {
                "name": "systemd-udevd.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-update-done.service": {
                "name": "systemd-update-done.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-update-utmp-runlevel.service": {
                "name": "systemd-update-utmp-runlevel.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-update-utmp.service": {
                "name": "systemd-update-utmp.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-user-sessions.service": {
                "name": "systemd-user-sessions.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-vconsole-setup.service": {
                "name": "systemd-vconsole-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-volatile-root.service": {
                "name": "systemd-volatile-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "target.service": {
                "name": "target.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "targetclid.service": {
                "name": "targetclid.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "tcsd.service": {
                "name": "tcsd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "teamd@.service": {
                "name": "teamd@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "timedatex.service": {
                "name": "timedatex.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "tuned.service": {
                "name": "tuned.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "unbound-anchor.service": {
                "name": "unbound-anchor.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "user-runtime-dir@.service": {
                "name": "user-runtime-dir@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "user-runtime-dir@0.service": {
                "name": "user-runtime-dir@0.service",
                "source": "systemd",
                "state": "stopped",
                "status": "unknown"
            },
            "user@.service": {
                "name": "user@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "user@0.service": {
                "name": "user@0.service",
                "source": "systemd",
                "state": "running",
                "status": "unknown"
            },
            "vdo-start-by-dev@.service": {
                "name": "vdo-start-by-dev@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "vdo.service": {
                "name": "vdo.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            }
        }
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58
Saturday 29 March 2025  18:24:58 -0400 (0:00:01.894)       0:00:11.436 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64
Saturday 29 March 2025  18:24:58 -0400 (0:00:00.057)       0:00:11.493 ******** 

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70
Saturday 29 March 2025  18:24:58 -0400 (0:00:00.022)       0:00:11.516 ******** 
ok: [managed-node3] => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [],
    "mounts": [],
    "packages": [],
    "pools": [],
    "volumes": []
}

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:85
Saturday 29 March 2025  18:24:58 -0400 (0:00:00.547)       0:00:12.064 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:92
Saturday 29 March 2025  18:24:58 -0400 (0:00:00.024)       0:00:12.089 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1743287000.5551405,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "ab8070345774adad92683e9645714452be7be474",
        "ctime": 1743286961.5000908,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 224396032,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1743286961.499091,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1343,
        "uid": 0,
        "version": "600044070",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:97
Saturday 29 March 2025  18:24:59 -0400 (0:00:00.368)       0:00:12.457 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115
Saturday 29 March 2025  18:24:59 -0400 (0:00:00.024)       0:00:12.481 ******** 

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:121
Saturday 29 March 2025  18:24:59 -0400 (0:00:00.020)       0:00:12.502 ******** 
ok: [managed-node3] => {
    "blivet_output": {
        "actions": [],
        "changed": false,
        "crypts": [],
        "failed": false,
        "leaves": [],
        "mounts": [],
        "packages": [],
        "pools": [],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130
Saturday 29 March 2025  18:24:59 -0400 (0:00:00.030)       0:00:12.532 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:134
Saturday 29 March 2025  18:24:59 -0400 (0:00:00.035)       0:00:12.567 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:150
Saturday 29 March 2025  18:24:59 -0400 (0:00:00.034)       0:00:12.602 ******** 

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:161
Saturday 29 March 2025  18:24:59 -0400 (0:00:00.036)       0:00:12.638 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:166
Saturday 29 March 2025  18:24:59 -0400 (0:00:00.034)       0:00:12.672 ******** 

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177
Saturday 29 March 2025  18:24:59 -0400 (0:00:00.028)       0:00:12.701 ******** 

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:189
Saturday 29 March 2025  18:24:59 -0400 (0:00:00.023)       0:00:12.724 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:197
Saturday 29 March 2025  18:24:59 -0400 (0:00:00.032)       0:00:12.757 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1743286813.1608264,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1716968941.893,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 135,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1716968586.525,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "1157759751",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:202
Saturday 29 March 2025  18:24:59 -0400 (0:00:00.387)       0:00:13.144 ******** 

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:224
Saturday 29 March 2025  18:24:59 -0400 (0:00:00.038)       0:00:13.183 ******** 
ok: [managed-node3]

TASK [Get unused disks] ********************************************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/tasks/setup.yml:25
Saturday 29 March 2025  18:25:00 -0400 (0:00:00.870)       0:00:14.053 ******** 
included: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml for managed-node3

TASK [Check if system is ostree] ***********************************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:5
Saturday 29 March 2025  18:25:00 -0400 (0:00:00.084)       0:00:14.137 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [Set flag to indicate system is ostree] ***********************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:10
Saturday 29 March 2025  18:25:01 -0400 (0:00:00.434)       0:00:14.571 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__snapshot_is_ostree": false
    },
    "changed": false
}

TASK [Ensure test packages] ****************************************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:14
Saturday 29 March 2025  18:25:01 -0400 (0:00:00.036)       0:00:14.608 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: util-linux

TASK [Find unused disks in the system] *****************************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:23
Saturday 29 March 2025  18:25:04 -0400 (0:00:02.958)       0:00:17.566 ******** 
ok: [managed-node3] => {
    "changed": false,
    "disks": [
        "sda",
        "sdb",
        "sdc",
        "sdd",
        "sde",
        "sdf",
        "sdg",
        "sdh",
        "sdi",
        "sdj"
    ],
    "info": [
        "Line: NAME=\"/dev/sda\" TYPE=\"disk\" SIZE=\"1073741824\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdb\" TYPE=\"disk\" SIZE=\"1073741824\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdc\" TYPE=\"disk\" SIZE=\"1073741824\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdd\" TYPE=\"disk\" SIZE=\"1073741824\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sde\" TYPE=\"disk\" SIZE=\"1073741824\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdf\" TYPE=\"disk\" SIZE=\"1073741824\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdg\" TYPE=\"disk\" SIZE=\"1073741824\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdh\" TYPE=\"disk\" SIZE=\"1073741824\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdi\" TYPE=\"disk\" SIZE=\"1073741824\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdj\" TYPE=\"disk\" SIZE=\"1073741824\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdk\" TYPE=\"disk\" SIZE=\"1073741824\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdl\" TYPE=\"disk\" SIZE=\"1073741824\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/xvda\" TYPE=\"disk\" SIZE=\"268435456000\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/xvda1\" TYPE=\"part\" SIZE=\"268434390528\" FSTYPE=\"xfs\" LOG-SEC=\"512\"",
        "Line type [part] is not disk: NAME=\"/dev/xvda1\" TYPE=\"part\" SIZE=\"268434390528\" FSTYPE=\"xfs\" LOG-SEC=\"512\"",
        "filename [xvda1] is a partition",
        "Disk [/dev/xvda] attrs [{'type': 'disk', 'size': '268435456000', 'fstype': '', 'ssize': '512'}] has partitions"
    ]
}
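
The info lines above show the raw device scan that the unused-disk helper parses before deciding which disks are free; the listing it works from can be reproduced by hand with an ad-hoc task along these lines (illustrative only, not the helper's actual implementation; the -b flag gives the byte sizes seen in the log):

- name: Reproduce the disk listing the helper parses (illustrative)
  command: lsblk -p -P -b -o NAME,TYPE,SIZE,FSTYPE,LOG-SEC
  register: lsblk_out
  changed_when: false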

TASK [Set unused_disks if necessary] *******************************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:31
Saturday 29 March 2025  18:25:05 -0400 (0:00:01.693)       0:00:19.260 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "unused_disks": [
            "sda",
            "sdb",
            "sdc",
            "sdd",
            "sde",
            "sdf",
            "sdg",
            "sdh",
            "sdi",
            "sdj"
        ]
    },
    "changed": false
}

TASK [Print unused disks] ******************************************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:36
Saturday 29 March 2025  18:25:05 -0400 (0:00:00.051)       0:00:19.311 ******** 
ok: [managed-node3] => {
    "unused_disks": [
        "sda",
        "sdb",
        "sdc",
        "sdd",
        "sde",
        "sdf",
        "sdg",
        "sdh",
        "sdi",
        "sdj"
    ]
}

TASK [Print info from find_unused_disk] ****************************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:44
Saturday 29 March 2025  18:25:05 -0400 (0:00:00.026)       0:00:19.338 ******** 
skipping: [managed-node3] => {}

TASK [Show disk information] ***************************************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:49
Saturday 29 March 2025  18:25:05 -0400 (0:00:00.028)       0:00:19.366 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Exit playbook when there are not enough unused disks in the system] ******
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:58
Saturday 29 March 2025  18:25:05 -0400 (0:00:00.029)       0:00:19.396 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Create LVM logical volumes under volume groups] **************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/tasks/setup.yml:31
Saturday 29 March 2025  18:25:05 -0400 (0:00:00.027)       0:00:19.423 ******** 

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Saturday 29 March 2025  18:25:06 -0400 (0:00:00.053)       0:00:19.476 ******** 
included: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Saturday 29 March 2025  18:25:06 -0400 (0:00:00.033)       0:00:19.510 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Saturday 29 March 2025  18:25:06 -0400 (0:00:00.077)       0:00:19.588 ******** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=CentOS_8.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_8.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_8.yml"
}
ok: [managed-node3] => (item=CentOS_8.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_8.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_8.yml"
}

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Saturday 29 March 2025  18:25:06 -0400 (0:00:00.058)       0:00:19.647 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Saturday 29 March 2025  18:25:06 -0400 (0:00:00.022)       0:00:19.669 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Saturday 29 March 2025  18:25:06 -0400 (0:00:00.022)       0:00:19.691 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Saturday 29 March 2025  18:25:06 -0400 (0:00:00.020)       0:00:19.712 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Saturday 29 March 2025  18:25:06 -0400 (0:00:00.022)       0:00:19.735 ******** 
included: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Saturday 29 March 2025  18:25:06 -0400 (0:00:00.049)       0:00:19.785 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: kmod-kvdo libblockdev libblockdev-crypto libblockdev-dm libblockdev-lvm libblockdev-mdraid libblockdev-swap python3-blivet stratis-cli stratisd vdo xfsprogs

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Saturday 29 March 2025  18:25:09 -0400 (0:00:02.840)       0:00:22.625 ******** 
ok: [managed-node3] => {
    "storage_pools": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc"
            ],
            "name": "test_vg1",
            "volumes": [
                {
                    "fs_type": "xfs",
                    "name": "lv1",
                    "size": "15%"
                },
                {
                    "fs_type": "xfs",
                    "name": "lv2",
                    "size": "50%"
                }
            ]
        },
        {
            "disks": [
                "sdd",
                "sde",
                "sdf"
            ],
            "name": "test_vg2",
            "volumes": [
                {
                    "fs_type": "xfs",
                    "name": "lv3",
                    "size": "10%"
                },
                {
                    "fs_type": "xfs",
                    "name": "lv4",
                    "size": "20%"
                }
            ]
        },
        {
            "disks": [
                "sdg",
                "sdh",
                "sdi",
                "sdj"
            ],
            "name": "test_vg3",
            "volumes": [
                {
                    "fs_type": "xfs",
                    "name": "lv5",
                    "size": "30%"
                },
                {
                    "fs_type": "xfs",
                    "name": "lv6",
                    "size": "25%"
                },
                {
                    "fs_type": "xfs",
                    "name": "lv7",
                    "size": "10%"
                },
                {
                    "fs_type": "xfs",
                    "name": "lv8",
                    "size": "10%"
                }
            ]
        }
    ]
}
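
For reference, the storage_pools structure printed above is the layout this setup step asks the storage role to create; expressed as a role invocation it would look roughly like the sketch below (task layout and variable placement are illustrative, not copied from the test source):

- name: Create LVM logical volumes under volume groups
  include_role:
    name: fedora.linux_system_roles.storage
  vars:
    storage_pools:
      - name: test_vg1
        disks: [sda, sdb, sdc]
        volumes:
          - { name: lv1, size: "15%", fs_type: xfs }
          - { name: lv2, size: "50%", fs_type: xfs }
      - name: test_vg2
        disks: [sdd, sde, sdf]
        volumes:
          - { name: lv3, size: "10%", fs_type: xfs }
          - { name: lv4, size: "20%", fs_type: xfs }
      - name: test_vg3
        disks: [sdg, sdh, sdi, sdj]
        volumes:
          - { name: lv5, size: "30%", fs_type: xfs }
          - { name: lv6, size: "25%", fs_type: xfs }
          - { name: lv7, size: "10%", fs_type: xfs }
          - { name: lv8, size: "10%", fs_type: xfs }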

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Saturday 29 March 2025  18:25:09 -0400 (0:00:00.036)       0:00:22.661 ******** 
ok: [managed-node3] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Saturday 29 March 2025  18:25:09 -0400 (0:00:00.022)       0:00:22.684 ******** 
ok: [managed-node3] => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [],
    "mounts": [],
    "packages": [
        "lvm2",
        "xfsprogs"
    ],
    "pools": [],
    "volumes": []
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32
Saturday 29 March 2025  18:25:13 -0400 (0:00:03.935)       0:00:26.620 ******** 
included: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Check if the COPR support packages should be installed] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:2
Saturday 29 March 2025  18:25:13 -0400 (0:00:00.039)       0:00:26.660 ******** 

TASK [fedora.linux_system_roles.storage : Make sure COPR support packages are present] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:13
Saturday 29 March 2025  18:25:13 -0400 (0:00:00.021)       0:00:26.682 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable COPRs] ************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:19
Saturday 29 March 2025  18:25:13 -0400 (0:00:00.022)       0:00:26.704 ******** 

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:38
Saturday 29 March 2025  18:25:13 -0400 (0:00:00.021)       0:00:26.726 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: kpartx lvm2 xfsprogs

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:52
Saturday 29 March 2025  18:25:16 -0400 (0:00:02.892)       0:00:29.618 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "services": {
            "NetworkManager-dispatcher.service": {
                "name": "NetworkManager-dispatcher.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "NetworkManager-wait-online.service": {
                "name": "NetworkManager-wait-online.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "NetworkManager.service": {
                "name": "NetworkManager.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "auditd.service": {
                "name": "auditd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "auth-rpcgss-module.service": {
                "name": "auth-rpcgss-module.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "autovt@.service": {
                "name": "autovt@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "enabled"
            },
            "blivet.service": {
                "name": "blivet.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "blk-availability.service": {
                "name": "blk-availability.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "chrony-dnssrv@.service": {
                "name": "chrony-dnssrv@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "chrony-wait.service": {
                "name": "chrony-wait.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "chronyd.service": {
                "name": "chronyd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "cloud-config.service": {
                "name": "cloud-config.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-final.service": {
                "name": "cloud-final.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-init-hotplugd.service": {
                "name": "cloud-init-hotplugd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "cloud-init-local.service": {
                "name": "cloud-init-local.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-init.service": {
                "name": "cloud-init.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "console-getty.service": {
                "name": "console-getty.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "container-getty@.service": {
                "name": "container-getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "cpupower.service": {
                "name": "cpupower.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "crond.service": {
                "name": "crond.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "dbus-org.freedesktop.hostname1.service": {
                "name": "dbus-org.freedesktop.hostname1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "dbus-org.freedesktop.locale1.service": {
                "name": "dbus-org.freedesktop.locale1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "dbus-org.freedesktop.login1.service": {
                "name": "dbus-org.freedesktop.login1.service",
                "source": "systemd",
                "state": "active",
                "status": "static"
            },
            "dbus-org.freedesktop.nm-dispatcher.service": {
                "name": "dbus-org.freedesktop.nm-dispatcher.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "dbus-org.freedesktop.portable1.service": {
                "name": "dbus-org.freedesktop.portable1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "dbus-org.freedesktop.timedate1.service": {
                "name": "dbus-org.freedesktop.timedate1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "dbus.service": {
                "name": "dbus.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "debug-shell.service": {
                "name": "debug-shell.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "dm-event.service": {
                "name": "dm-event.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "dnf-makecache.service": {
                "name": "dnf-makecache.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dnf-system-upgrade-cleanup.service": {
                "name": "dnf-system-upgrade-cleanup.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "dnf-system-upgrade.service": {
                "name": "dnf-system-upgrade.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "dracut-cmdline.service": {
                "name": "dracut-cmdline.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-initqueue.service": {
                "name": "dracut-initqueue.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-mount.service": {
                "name": "dracut-mount.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-mount.service": {
                "name": "dracut-pre-mount.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-pivot.service": {
                "name": "dracut-pre-pivot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-trigger.service": {
                "name": "dracut-pre-trigger.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-udev.service": {
                "name": "dracut-pre-udev.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-shutdown-onfailure.service": {
                "name": "dracut-shutdown-onfailure.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-shutdown.service": {
                "name": "dracut-shutdown.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "ebtables.service": {
                "name": "ebtables.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "emergency.service": {
                "name": "emergency.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "firewalld.service": {
                "name": "firewalld.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "fstrim.service": {
                "name": "fstrim.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "getty@.service": {
                "name": "getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "enabled"
            },
            "getty@tty1.service": {
                "name": "getty@tty1.service",
                "source": "systemd",
                "state": "running",
                "status": "unknown"
            },
            "grub-boot-indeterminate.service": {
                "name": "grub-boot-indeterminate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "gssproxy.service": {
                "name": "gssproxy.service",
                "source": "systemd",
                "state": "running",
                "status": "disabled"
            },
            "halt-local.service": {
                "name": "halt-local.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "import-state.service": {
                "name": "import-state.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "initrd-cleanup.service": {
                "name": "initrd-cleanup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-parse-etc.service": {
                "name": "initrd-parse-etc.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-switch-root.service": {
                "name": "initrd-switch-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-udevadm-cleanup-db.service": {
                "name": "initrd-udevadm-cleanup-db.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "iprdump.service": {
                "name": "iprdump.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "iprinit.service": {
                "name": "iprinit.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "iprupdate.service": {
                "name": "iprupdate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "irqbalance.service": {
                "name": "irqbalance.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "kdump.service": {
                "name": "kdump.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "kmod-static-nodes.service": {
                "name": "kmod-static-nodes.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "kvm_stat.service": {
                "name": "kvm_stat.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "ldconfig.service": {
                "name": "ldconfig.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "loadmodules.service": {
                "name": "loadmodules.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "lvm2-lvmpolld.service": {
                "name": "lvm2-lvmpolld.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "lvm2-monitor.service": {
                "name": "lvm2-monitor.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "lvm2-pvscan@.service": {
                "name": "lvm2-pvscan@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "lvm2-pvscan@8:0.service": {
                "name": "lvm2-pvscan@8:0.service",
                "source": "systemd",
                "state": "stopped",
                "status": "unknown"
            },
            "lvm2-pvscan@8:16.service": {
                "name": "lvm2-pvscan@8:16.service",
                "source": "systemd",
                "state": "stopped",
                "status": "unknown"
            },
            "lvm2-pvscan@8:32.service": {
                "name": "lvm2-pvscan@8:32.service",
                "source": "systemd",
                "state": "stopped",
                "status": "unknown"
            },
            "man-db-cache-update.service": {
                "name": "man-db-cache-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "man-db-restart-cache-update.service": {
                "name": "man-db-restart-cache-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "mdadm-grow-continue@.service": {
                "name": "mdadm-grow-continue@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "mdadm-last-resort@.service": {
                "name": "mdadm-last-resort@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "mdcheck_continue.service": {
                "name": "mdcheck_continue.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "mdcheck_start.service": {
                "name": "mdcheck_start.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "mdmon@.service": {
                "name": "mdmon@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "mdmonitor-oneshot.service": {
                "name": "mdmonitor-oneshot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "mdmonitor.service": {
                "name": "mdmonitor.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "messagebus.service": {
                "name": "messagebus.service",
                "source": "systemd",
                "state": "active",
                "status": "static"
            },
            "microcode.service": {
                "name": "microcode.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "multipathd.service": {
                "name": "multipathd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "ndctl-monitor.service": {
                "name": "ndctl-monitor.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nfs-blkmap.service": {
                "name": "nfs-blkmap.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nfs-convert.service": {
                "name": "nfs-convert.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nfs-idmapd.service": {
                "name": "nfs-idmapd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfs-mountd.service": {
                "name": "nfs-mountd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfs-server.service": {
                "name": "nfs-server.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "nfs-utils.service": {
                "name": "nfs-utils.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfsdcld.service": {
                "name": "nfsdcld.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nftables.service": {
                "name": "nftables.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nis-domainname.service": {
                "name": "nis-domainname.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "oddjobd.service": {
                "name": "oddjobd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "plymouth-halt.service": {
                "name": "plymouth-halt.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "plymouth-kexec.service": {
                "name": "plymouth-kexec.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "plymouth-poweroff.service": {
                "name": "plymouth-poweroff.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "plymouth-quit-wait.service": {
                "name": "plymouth-quit-wait.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "plymouth-quit.service": {
                "name": "plymouth-quit.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "plymouth-read-write.service": {
                "name": "plymouth-read-write.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "plymouth-reboot.service": {
                "name": "plymouth-reboot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "plymouth-start.service": {
                "name": "plymouth-start.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "plymouth-switch-root-initramfs.service": {
                "name": "plymouth-switch-root-initramfs.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "plymouth-switch-root.service": {
                "name": "plymouth-switch-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "polkit.service": {
                "name": "polkit.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "qemu-guest-agent.service": {
                "name": "qemu-guest-agent.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "quotaon.service": {
                "name": "quotaon.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "rc-local.service": {
                "name": "rc-local.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rdisc.service": {
                "name": "rdisc.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "rescue.service": {
                "name": "rescue.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "restraintd.service": {
                "name": "restraintd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rngd.service": {
                "name": "rngd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rpc-gssd.service": {
                "name": "rpc-gssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-statd-notify.service": {
                "name": "rpc-statd-notify.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-statd.service": {
                "name": "rpc-statd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpcbind.service": {
                "name": "rpcbind.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rsyslog.service": {
                "name": "rsyslog.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "selinux-autorelabel-mark.service": {
                "name": "selinux-autorelabel-mark.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "selinux-autorelabel.service": {
                "name": "selinux-autorelabel.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "serial-getty@.service": {
                "name": "serial-getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "disabled"
            },
            "sshd-keygen@.service": {
                "name": "sshd-keygen@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "disabled"
            },
            "sshd-keygen@ecdsa.service": {
                "name": "sshd-keygen@ecdsa.service",
                "source": "systemd",
                "state": "stopped",
                "status": "unknown"
            },
            "sshd-keygen@ed25519.service": {
                "name": "sshd-keygen@ed25519.service",
                "source": "systemd",
                "state": "stopped",
                "status": "unknown"
            },
            "sshd-keygen@rsa.service": {
                "name": "sshd-keygen@rsa.service",
                "source": "systemd",
                "state": "stopped",
                "status": "unknown"
            },
            "sshd.service": {
                "name": "sshd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "sshd@.service": {
                "name": "sshd@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "sssd-autofs.service": {
                "name": "sssd-autofs.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-kcm.service": {
                "name": "sssd-kcm.service",
                "source": "systemd",
                "state": "stopped",
                "status": "indirect"
            },
            "sssd-nss.service": {
                "name": "sssd-nss.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-pac.service": {
                "name": "sssd-pac.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-pam.service": {
                "name": "sssd-pam.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-ssh.service": {
                "name": "sssd-ssh.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-sudo.service": {
                "name": "sssd-sudo.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd.service": {
                "name": "sssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "stratis-fstab-setup@.service": {
                "name": "stratis-fstab-setup@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "stratisd-min-postinitrd.service": {
                "name": "stratisd-min-postinitrd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "stratisd.service": {
                "name": "stratisd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "syslog.service": {
                "name": "syslog.service",
                "source": "systemd",
                "state": "active",
                "status": "enabled"
            },
            "system-update-cleanup.service": {
                "name": "system-update-cleanup.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-ask-password-console.service": {
                "name": "systemd-ask-password-console.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-ask-password-plymouth.service": {
                "name": "systemd-ask-password-plymouth.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-ask-password-wall.service": {
                "name": "systemd-ask-password-wall.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-backlight@.service": {
                "name": "systemd-backlight@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-binfmt.service": {
                "name": "systemd-binfmt.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-coredump@.service": {
                "name": "systemd-coredump@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-exit.service": {
                "name": "systemd-exit.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-firstboot.service": {
                "name": "systemd-firstboot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-fsck-root.service": {
                "name": "systemd-fsck-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-fsck@.service": {
                "name": "systemd-fsck@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-halt.service": {
                "name": "systemd-halt.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-hibernate-resume@.service": {
                "name": "systemd-hibernate-resume@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-hibernate.service": {
                "name": "systemd-hibernate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-hostnamed.service": {
                "name": "systemd-hostnamed.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-hwdb-update.service": {
                "name": "systemd-hwdb-update.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hybrid-sleep.service": {
                "name": "systemd-hybrid-sleep.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-initctl.service": {
                "name": "systemd-initctl.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journal-catalog-update.service": {
                "name": "systemd-journal-catalog-update.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journal-flush.service": {
                "name": "systemd-journal-flush.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journald.service": {
                "name": "systemd-journald.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-kexec.service": {
                "name": "systemd-kexec.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-localed.service": {
                "name": "systemd-localed.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-logind.service": {
                "name": "systemd-logind.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-machine-id-commit.service": {
                "name": "systemd-machine-id-commit.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-modules-load.service": {
                "name": "systemd-modules-load.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-portabled.service": {
                "name": "systemd-portabled.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-poweroff.service": {
                "name": "systemd-poweroff.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-pstore.service": {
                "name": "systemd-pstore.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-quotacheck.service": {
                "name": "systemd-quotacheck.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-random-seed.service": {
                "name": "systemd-random-seed.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-reboot.service": {
                "name": "systemd-reboot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-remount-fs.service": {
                "name": "systemd-remount-fs.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-resolved.service": {
                "name": "systemd-resolved.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-rfkill.service": {
                "name": "systemd-rfkill.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-suspend-then-hibernate.service": {
                "name": "systemd-suspend-then-hibernate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-suspend.service": {
                "name": "systemd-suspend.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-sysctl.service": {
                "name": "systemd-sysctl.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-sysusers.service": {
                "name": "systemd-sysusers.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-timedated.service": {
                "name": "systemd-timedated.service",
                "source": "systemd",
                "state": "inactive",
                "status": "masked"
            },
            "systemd-tmpfiles-clean.service": {
                "name": "systemd-tmpfiles-clean.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup-dev.service": {
                "name": "systemd-tmpfiles-setup-dev.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup.service": {
                "name": "systemd-tmpfiles-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udev-settle.service": {
                "name": "systemd-udev-settle.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udev-trigger.service": {
                "name": "systemd-udev-trigger.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udevd.service": {
                "name": "systemd-udevd.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-update-done.service": {
                "name": "systemd-update-done.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-update-utmp-runlevel.service": {
                "name": "systemd-update-utmp-runlevel.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-update-utmp.service": {
                "name": "systemd-update-utmp.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-user-sessions.service": {
                "name": "systemd-user-sessions.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-vconsole-setup.service": {
                "name": "systemd-vconsole-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-volatile-root.service": {
                "name": "systemd-volatile-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "target.service": {
                "name": "target.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "targetclid.service": {
                "name": "targetclid.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "tcsd.service": {
                "name": "tcsd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "teamd@.service": {
                "name": "teamd@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "timedatex.service": {
                "name": "timedatex.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "tuned.service": {
                "name": "tuned.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "unbound-anchor.service": {
                "name": "unbound-anchor.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "user-runtime-dir@.service": {
                "name": "user-runtime-dir@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "user-runtime-dir@0.service": {
                "name": "user-runtime-dir@0.service",
                "source": "systemd",
                "state": "stopped",
                "status": "unknown"
            },
            "user@.service": {
                "name": "user@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "user@0.service": {
                "name": "user@0.service",
                "source": "systemd",
                "state": "running",
                "status": "unknown"
            },
            "vdo-start-by-dev@.service": {
                "name": "vdo-start-by-dev@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "vdo.service": {
                "name": "vdo.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            }
        }
    },
    "changed": false
}

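(Note: the service inventory above is the kind of mapping returned by the ansible.builtin.service_facts module, keyed by unit name with name/source/state/status fields. A minimal standalone sketch that gathers the same data, assuming only that service_facts is usable on the managed node, would be:)

    - name: Get service facts
      ansible.builtin.service_facts:

    - name: Show one entry from the mapping, e.g. stratisd
      ansible.builtin.debug:
        var: ansible_facts.services['stratisd.service']
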
TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58
Saturday 29 March 2025  18:25:17 -0400 (0:00:01.751)       0:00:31.370 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64
Saturday 29 March 2025  18:25:18 -0400 (0:00:00.086)       0:00:31.456 ******** 

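(Note: nothing is reported here because storage_cryptsetup_services was set to an empty list in the previous task, so the loop has no items. A hedged sketch of the masking pattern, not the role's actual task text, would be:)

    - name: Mask the systemd cryptsetup services
      ansible.builtin.systemd:
        name: "{{ item }}"
        masked: true
      loop: "{{ storage_cryptsetup_services }}"
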
TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70
Saturday 29 March 2025  18:25:18 -0400 (0:00:00.046)       0:00:31.502 ******** 
changed: [managed-node3] => {
    "actions": [
        {
            "action": "create format",
            "device": "/dev/sdj",
            "fs_type": "lvmpv"
        },
        {
            "action": "create format",
            "device": "/dev/sdi",
            "fs_type": "lvmpv"
        },
        {
            "action": "create format",
            "device": "/dev/sdh",
            "fs_type": "lvmpv"
        },
        {
            "action": "create format",
            "device": "/dev/sdg",
            "fs_type": "lvmpv"
        },
        {
            "action": "create device",
            "device": "/dev/test_vg3",
            "fs_type": null
        },
        {
            "action": "create device",
            "device": "/dev/mapper/test_vg3-lv8",
            "fs_type": null
        },
        {
            "action": "create format",
            "device": "/dev/mapper/test_vg3-lv8",
            "fs_type": "xfs"
        },
        {
            "action": "create device",
            "device": "/dev/mapper/test_vg3-lv7",
            "fs_type": null
        },
        {
            "action": "create format",
            "device": "/dev/mapper/test_vg3-lv7",
            "fs_type": "xfs"
        },
        {
            "action": "create device",
            "device": "/dev/mapper/test_vg3-lv6",
            "fs_type": null
        },
        {
            "action": "create format",
            "device": "/dev/mapper/test_vg3-lv6",
            "fs_type": "xfs"
        },
        {
            "action": "create device",
            "device": "/dev/mapper/test_vg3-lv5",
            "fs_type": null
        },
        {
            "action": "create format",
            "device": "/dev/mapper/test_vg3-lv5",
            "fs_type": "xfs"
        },
        {
            "action": "create format",
            "device": "/dev/sdf",
            "fs_type": "lvmpv"
        },
        {
            "action": "create format",
            "device": "/dev/sde",
            "fs_type": "lvmpv"
        },
        {
            "action": "create format",
            "device": "/dev/sdd",
            "fs_type": "lvmpv"
        },
        {
            "action": "create device",
            "device": "/dev/test_vg2",
            "fs_type": null
        },
        {
            "action": "create device",
            "device": "/dev/mapper/test_vg2-lv4",
            "fs_type": null
        },
        {
            "action": "create format",
            "device": "/dev/mapper/test_vg2-lv4",
            "fs_type": "xfs"
        },
        {
            "action": "create device",
            "device": "/dev/mapper/test_vg2-lv3",
            "fs_type": null
        },
        {
            "action": "create format",
            "device": "/dev/mapper/test_vg2-lv3",
            "fs_type": "xfs"
        },
        {
            "action": "create format",
            "device": "/dev/sdc",
            "fs_type": "lvmpv"
        },
        {
            "action": "create format",
            "device": "/dev/sdb",
            "fs_type": "lvmpv"
        },
        {
            "action": "create format",
            "device": "/dev/sda",
            "fs_type": "lvmpv"
        },
        {
            "action": "create device",
            "device": "/dev/test_vg1",
            "fs_type": null
        },
        {
            "action": "create device",
            "device": "/dev/mapper/test_vg1-lv2",
            "fs_type": null
        },
        {
            "action": "create format",
            "device": "/dev/mapper/test_vg1-lv2",
            "fs_type": "xfs"
        },
        {
            "action": "create device",
            "device": "/dev/mapper/test_vg1-lv1",
            "fs_type": null
        },
        {
            "action": "create format",
            "device": "/dev/mapper/test_vg1-lv1",
            "fs_type": "xfs"
        }
    ],
    "changed": true,
    "crypts": [],
    "leaves": [
        "/dev/sdk",
        "/dev/sdl",
        "/dev/xvda1",
        "/dev/mapper/test_vg1-lv1",
        "/dev/mapper/test_vg1-lv2",
        "/dev/mapper/test_vg2-lv3",
        "/dev/mapper/test_vg2-lv4",
        "/dev/mapper/test_vg3-lv5",
        "/dev/mapper/test_vg3-lv6",
        "/dev/mapper/test_vg3-lv7",
        "/dev/mapper/test_vg3-lv8"
    ],
    "mounts": [],
    "packages": [
        "xfsprogs",
        "lvm2"
    ],
    "pools": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "test_vg1",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/test_vg1-lv1",
                    "_kernel_device": "/dev/dm-7",
                    "_mount_id": "/dev/mapper/test_vg1-lv1",
                    "_raw_device": "/dev/mapper/test_vg1-lv1",
                    "_raw_kernel_device": "/dev/dm-7",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "",
                    "mount_user": null,
                    "name": "lv1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "15%",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                },
                {
                    "_device": "/dev/mapper/test_vg1-lv2",
                    "_kernel_device": "/dev/dm-6",
                    "_mount_id": "/dev/mapper/test_vg1-lv2",
                    "_raw_device": "/dev/mapper/test_vg1-lv2",
                    "_raw_kernel_device": "/dev/dm-6",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "",
                    "mount_user": null,
                    "name": "lv2",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "50%",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        },
        {
            "disks": [
                "sdd",
                "sde",
                "sdf"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "test_vg2",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/test_vg2-lv3",
                    "_kernel_device": "/dev/dm-5",
                    "_mount_id": "/dev/mapper/test_vg2-lv3",
                    "_raw_device": "/dev/mapper/test_vg2-lv3",
                    "_raw_kernel_device": "/dev/dm-5",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "",
                    "mount_user": null,
                    "name": "lv3",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "10%",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                },
                {
                    "_device": "/dev/mapper/test_vg2-lv4",
                    "_kernel_device": "/dev/dm-4",
                    "_mount_id": "/dev/mapper/test_vg2-lv4",
                    "_raw_device": "/dev/mapper/test_vg2-lv4",
                    "_raw_kernel_device": "/dev/dm-4",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "",
                    "mount_user": null,
                    "name": "lv4",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "20%",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        },
        {
            "disks": [
                "sdg",
                "sdh",
                "sdi",
                "sdj"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "test_vg3",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/test_vg3-lv5",
                    "_kernel_device": "/dev/dm-3",
                    "_mount_id": "/dev/mapper/test_vg3-lv5",
                    "_raw_device": "/dev/mapper/test_vg3-lv5",
                    "_raw_kernel_device": "/dev/dm-3",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "",
                    "mount_user": null,
                    "name": "lv5",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "30%",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                },
                {
                    "_device": "/dev/mapper/test_vg3-lv6",
                    "_kernel_device": "/dev/dm-2",
                    "_mount_id": "/dev/mapper/test_vg3-lv6",
                    "_raw_device": "/dev/mapper/test_vg3-lv6",
                    "_raw_kernel_device": "/dev/dm-2",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "",
                    "mount_user": null,
                    "name": "lv6",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "25%",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                },
                {
                    "_device": "/dev/mapper/test_vg3-lv7",
                    "_kernel_device": "/dev/dm-1",
                    "_mount_id": "/dev/mapper/test_vg3-lv7",
                    "_raw_device": "/dev/mapper/test_vg3-lv7",
                    "_raw_kernel_device": "/dev/dm-1",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "",
                    "mount_user": null,
                    "name": "lv7",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "10%",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                },
                {
                    "_device": "/dev/mapper/test_vg3-lv8",
                    "_kernel_device": "/dev/dm-0",
                    "_mount_id": "/dev/mapper/test_vg3-lv8",
                    "_raw_device": "/dev/mapper/test_vg3-lv8",
                    "_raw_kernel_device": "/dev/dm-0",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "",
                    "mount_user": null,
                    "name": "lv8",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "10%",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        }
    ],
    "volumes": []
}

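(Note: the actions above create three volume groups, test_vg1 on sda-sdc, test_vg2 on sdd-sdf and test_vg3 on sdg-sdj, each with percent-sized XFS logical volumes and no mount points. A sketch of the storage_pools input that would drive this result, with values inferred from the output above and the layout following the storage role's documented variable format, would be roughly:)

    storage_pools:
      - name: test_vg1
        disks: [sda, sdb, sdc]
        volumes:
          - {name: lv1, size: "15%", fs_type: xfs}
          - {name: lv2, size: "50%", fs_type: xfs}
      - name: test_vg2
        disks: [sdd, sde, sdf]
        volumes:
          - {name: lv3, size: "10%", fs_type: xfs}
          - {name: lv4, size: "20%", fs_type: xfs}
      - name: test_vg3
        disks: [sdg, sdh, sdi, sdj]
        volumes:
          - {name: lv5, size: "30%", fs_type: xfs}
          - {name: lv6, size: "25%", fs_type: xfs}
          - {name: lv7, size: "10%", fs_type: xfs}
          - {name: lv8, size: "10%", fs_type: xfs}
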
TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:85
Saturday 29 March 2025  18:25:26 -0400 (0:00:08.410)       0:00:39.912 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "udevadm",
        "trigger",
        "--subsystem-match=block"
    ],
    "delta": "0:00:00.016643",
    "end": "2025-03-29 18:25:26.950209",
    "rc": 0,
    "start": "2025-03-29 18:25:26.933566"
}

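(Note: the udev workaround above just re-triggers block-device uevents; expressed as a standalone task, with the command taken verbatim from the output above, it would be roughly:)

    - name: Workaround for udev issue on some platforms
      ansible.builtin.command: udevadm trigger --subsystem-match=block
      changed_when: false
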
TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:92
Saturday 29 March 2025  18:25:27 -0400 (0:00:00.666)       0:00:40.579 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1743287000.5551405,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "ab8070345774adad92683e9645714452be7be474",
        "ctime": 1743286961.5000908,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 224396032,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1743286961.499091,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1343,
        "uid": 0,
        "version": "600044070",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:97
Saturday 29 March 2025  18:25:27 -0400 (0:00:00.443)       0:00:41.022 ******** 
ok: [managed-node3] => {
    "backup": "",
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115
Saturday 29 March 2025  18:25:28 -0400 (0:00:00.569)       0:00:41.592 ******** 

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:121
Saturday 29 March 2025  18:25:28 -0400 (0:00:00.038)       0:00:41.630 ******** 
ok: [managed-node3] => {
    "blivet_output": {
        "actions": [
            {
                "action": "create format",
                "device": "/dev/sdj",
                "fs_type": "lvmpv"
            },
            {
                "action": "create format",
                "device": "/dev/sdi",
                "fs_type": "lvmpv"
            },
            {
                "action": "create format",
                "device": "/dev/sdh",
                "fs_type": "lvmpv"
            },
            {
                "action": "create format",
                "device": "/dev/sdg",
                "fs_type": "lvmpv"
            },
            {
                "action": "create device",
                "device": "/dev/test_vg3",
                "fs_type": null
            },
            {
                "action": "create device",
                "device": "/dev/mapper/test_vg3-lv8",
                "fs_type": null
            },
            {
                "action": "create format",
                "device": "/dev/mapper/test_vg3-lv8",
                "fs_type": "xfs"
            },
            {
                "action": "create device",
                "device": "/dev/mapper/test_vg3-lv7",
                "fs_type": null
            },
            {
                "action": "create format",
                "device": "/dev/mapper/test_vg3-lv7",
                "fs_type": "xfs"
            },
            {
                "action": "create device",
                "device": "/dev/mapper/test_vg3-lv6",
                "fs_type": null
            },
            {
                "action": "create format",
                "device": "/dev/mapper/test_vg3-lv6",
                "fs_type": "xfs"
            },
            {
                "action": "create device",
                "device": "/dev/mapper/test_vg3-lv5",
                "fs_type": null
            },
            {
                "action": "create format",
                "device": "/dev/mapper/test_vg3-lv5",
                "fs_type": "xfs"
            },
            {
                "action": "create format",
                "device": "/dev/sdf",
                "fs_type": "lvmpv"
            },
            {
                "action": "create format",
                "device": "/dev/sde",
                "fs_type": "lvmpv"
            },
            {
                "action": "create format",
                "device": "/dev/sdd",
                "fs_type": "lvmpv"
            },
            {
                "action": "create device",
                "device": "/dev/test_vg2",
                "fs_type": null
            },
            {
                "action": "create device",
                "device": "/dev/mapper/test_vg2-lv4",
                "fs_type": null
            },
            {
                "action": "create format",
                "device": "/dev/mapper/test_vg2-lv4",
                "fs_type": "xfs"
            },
            {
                "action": "create device",
                "device": "/dev/mapper/test_vg2-lv3",
                "fs_type": null
            },
            {
                "action": "create format",
                "device": "/dev/mapper/test_vg2-lv3",
                "fs_type": "xfs"
            },
            {
                "action": "create format",
                "device": "/dev/sdc",
                "fs_type": "lvmpv"
            },
            {
                "action": "create format",
                "device": "/dev/sdb",
                "fs_type": "lvmpv"
            },
            {
                "action": "create format",
                "device": "/dev/sda",
                "fs_type": "lvmpv"
            },
            {
                "action": "create device",
                "device": "/dev/test_vg1",
                "fs_type": null
            },
            {
                "action": "create device",
                "device": "/dev/mapper/test_vg1-lv2",
                "fs_type": null
            },
            {
                "action": "create format",
                "device": "/dev/mapper/test_vg1-lv2",
                "fs_type": "xfs"
            },
            {
                "action": "create device",
                "device": "/dev/mapper/test_vg1-lv1",
                "fs_type": null
            },
            {
                "action": "create format",
                "device": "/dev/mapper/test_vg1-lv1",
                "fs_type": "xfs"
            }
        ],
        "changed": true,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/sdk",
            "/dev/sdl",
            "/dev/xvda1",
            "/dev/mapper/test_vg1-lv1",
            "/dev/mapper/test_vg1-lv2",
            "/dev/mapper/test_vg2-lv3",
            "/dev/mapper/test_vg2-lv4",
            "/dev/mapper/test_vg3-lv5",
            "/dev/mapper/test_vg3-lv6",
            "/dev/mapper/test_vg3-lv7",
            "/dev/mapper/test_vg3-lv8"
        ],
        "mounts": [],
        "packages": [
            "xfsprogs",
            "lvm2"
        ],
        "pools": [
            {
                "disks": [
                    "sda",
                    "sdb",
                    "sdc"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "test_vg1",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/test_vg1-lv1",
                        "_kernel_device": "/dev/dm-7",
                        "_mount_id": "/dev/mapper/test_vg1-lv1",
                        "_raw_device": "/dev/mapper/test_vg1-lv1",
                        "_raw_kernel_device": "/dev/dm-7",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "",
                        "mount_user": null,
                        "name": "lv1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "15%",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    },
                    {
                        "_device": "/dev/mapper/test_vg1-lv2",
                        "_kernel_device": "/dev/dm-6",
                        "_mount_id": "/dev/mapper/test_vg1-lv2",
                        "_raw_device": "/dev/mapper/test_vg1-lv2",
                        "_raw_kernel_device": "/dev/dm-6",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "",
                        "mount_user": null,
                        "name": "lv2",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "50%",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            },
            {
                "disks": [
                    "sdd",
                    "sde",
                    "sdf"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "test_vg2",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/test_vg2-lv3",
                        "_kernel_device": "/dev/dm-5",
                        "_mount_id": "/dev/mapper/test_vg2-lv3",
                        "_raw_device": "/dev/mapper/test_vg2-lv3",
                        "_raw_kernel_device": "/dev/dm-5",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "",
                        "mount_user": null,
                        "name": "lv3",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "10%",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    },
                    {
                        "_device": "/dev/mapper/test_vg2-lv4",
                        "_kernel_device": "/dev/dm-4",
                        "_mount_id": "/dev/mapper/test_vg2-lv4",
                        "_raw_device": "/dev/mapper/test_vg2-lv4",
                        "_raw_kernel_device": "/dev/dm-4",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "",
                        "mount_user": null,
                        "name": "lv4",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "20%",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            },
            {
                "disks": [
                    "sdg",
                    "sdh",
                    "sdi",
                    "sdj"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "test_vg3",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/test_vg3-lv5",
                        "_kernel_device": "/dev/dm-3",
                        "_mount_id": "/dev/mapper/test_vg3-lv5",
                        "_raw_device": "/dev/mapper/test_vg3-lv5",
                        "_raw_kernel_device": "/dev/dm-3",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "",
                        "mount_user": null,
                        "name": "lv5",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "30%",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    },
                    {
                        "_device": "/dev/mapper/test_vg3-lv6",
                        "_kernel_device": "/dev/dm-2",
                        "_mount_id": "/dev/mapper/test_vg3-lv6",
                        "_raw_device": "/dev/mapper/test_vg3-lv6",
                        "_raw_kernel_device": "/dev/dm-2",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "",
                        "mount_user": null,
                        "name": "lv6",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "25%",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    },
                    {
                        "_device": "/dev/mapper/test_vg3-lv7",
                        "_kernel_device": "/dev/dm-1",
                        "_mount_id": "/dev/mapper/test_vg3-lv7",
                        "_raw_device": "/dev/mapper/test_vg3-lv7",
                        "_raw_kernel_device": "/dev/dm-1",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "",
                        "mount_user": null,
                        "name": "lv7",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "10%",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    },
                    {
                        "_device": "/dev/mapper/test_vg3-lv8",
                        "_kernel_device": "/dev/dm-0",
                        "_mount_id": "/dev/mapper/test_vg3-lv8",
                        "_raw_device": "/dev/mapper/test_vg3-lv8",
                        "_raw_kernel_device": "/dev/dm-0",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "",
                        "mount_user": null,
                        "name": "lv8",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "10%",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ],
        "volumes": []
    }
}
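
For orientation: the pool facts above describe three LVM volume groups built for this test - test_vg1 on sda/sdb/sdc (lv1 at 15%, lv2 at 50%), test_vg2 on sdd/sde/sdf (lv3 at 10%, lv4 at 20%), and test_vg3 on sdg/sdh/sdi/sdj (lv5 30%, lv6 25%, lv7 10%, lv8 10%), all xfs and none mounted. A storage_pools definition reconstructed from those facts would look roughly like the sketch below; this is illustrative only, not the literal variables from the test's setup tasks.

    storage_pools:
      - name: test_vg1
        disks: [sda, sdb, sdc]
        volumes:
          - { name: lv1, size: "15%", fs_type: xfs }
          - { name: lv2, size: "50%", fs_type: xfs }
      - name: test_vg2
        disks: [sdd, sde, sdf]
        volumes:
          - { name: lv3, size: "10%", fs_type: xfs }
          - { name: lv4, size: "20%", fs_type: xfs }
      - name: test_vg3
        disks: [sdg, sdh, sdi, sdj]
        volumes:
          - { name: lv5, size: "30%", fs_type: xfs }
          - { name: lv6, size: "25%", fs_type: xfs }
          - { name: lv7, size: "10%", fs_type: xfs }
          - { name: lv8, size: "10%", fs_type: xfs }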

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130
Saturday 29 March 2025  18:25:28 -0400 (0:00:00.070)       0:00:41.701 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda",
                    "sdb",
                    "sdc"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "test_vg1",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/test_vg1-lv1",
                        "_kernel_device": "/dev/dm-7",
                        "_mount_id": "/dev/mapper/test_vg1-lv1",
                        "_raw_device": "/dev/mapper/test_vg1-lv1",
                        "_raw_kernel_device": "/dev/dm-7",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "",
                        "mount_user": null,
                        "name": "lv1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "15%",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    },
                    {
                        "_device": "/dev/mapper/test_vg1-lv2",
                        "_kernel_device": "/dev/dm-6",
                        "_mount_id": "/dev/mapper/test_vg1-lv2",
                        "_raw_device": "/dev/mapper/test_vg1-lv2",
                        "_raw_kernel_device": "/dev/dm-6",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "",
                        "mount_user": null,
                        "name": "lv2",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "50%",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            },
            {
                "disks": [
                    "sdd",
                    "sde",
                    "sdf"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "test_vg2",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/test_vg2-lv3",
                        "_kernel_device": "/dev/dm-5",
                        "_mount_id": "/dev/mapper/test_vg2-lv3",
                        "_raw_device": "/dev/mapper/test_vg2-lv3",
                        "_raw_kernel_device": "/dev/dm-5",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "",
                        "mount_user": null,
                        "name": "lv3",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "10%",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    },
                    {
                        "_device": "/dev/mapper/test_vg2-lv4",
                        "_kernel_device": "/dev/dm-4",
                        "_mount_id": "/dev/mapper/test_vg2-lv4",
                        "_raw_device": "/dev/mapper/test_vg2-lv4",
                        "_raw_kernel_device": "/dev/dm-4",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "",
                        "mount_user": null,
                        "name": "lv4",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "20%",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            },
            {
                "disks": [
                    "sdg",
                    "sdh",
                    "sdi",
                    "sdj"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "test_vg3",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/test_vg3-lv5",
                        "_kernel_device": "/dev/dm-3",
                        "_mount_id": "/dev/mapper/test_vg3-lv5",
                        "_raw_device": "/dev/mapper/test_vg3-lv5",
                        "_raw_kernel_device": "/dev/dm-3",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "",
                        "mount_user": null,
                        "name": "lv5",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "30%",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    },
                    {
                        "_device": "/dev/mapper/test_vg3-lv6",
                        "_kernel_device": "/dev/dm-2",
                        "_mount_id": "/dev/mapper/test_vg3-lv6",
                        "_raw_device": "/dev/mapper/test_vg3-lv6",
                        "_raw_kernel_device": "/dev/dm-2",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "",
                        "mount_user": null,
                        "name": "lv6",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "25%",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    },
                    {
                        "_device": "/dev/mapper/test_vg3-lv7",
                        "_kernel_device": "/dev/dm-1",
                        "_mount_id": "/dev/mapper/test_vg3-lv7",
                        "_raw_device": "/dev/mapper/test_vg3-lv7",
                        "_raw_kernel_device": "/dev/dm-1",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "",
                        "mount_user": null,
                        "name": "lv7",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "10%",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    },
                    {
                        "_device": "/dev/mapper/test_vg3-lv8",
                        "_kernel_device": "/dev/dm-0",
                        "_mount_id": "/dev/mapper/test_vg3-lv8",
                        "_raw_device": "/dev/mapper/test_vg3-lv8",
                        "_raw_kernel_device": "/dev/dm-0",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "",
                        "mount_user": null,
                        "name": "lv8",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "10%",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:134
Saturday 29 March 2025  18:25:28 -0400 (0:00:00.054)       0:00:41.756 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:150
Saturday 29 March 2025  18:25:28 -0400 (0:00:00.052)       0:00:41.808 ******** 

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:161
Saturday 29 March 2025  18:25:28 -0400 (0:00:00.054)       0:00:41.863 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:166
Saturday 29 March 2025  18:25:28 -0400 (0:00:00.036)       0:00:41.899 ******** 

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177
Saturday 29 March 2025  18:25:28 -0400 (0:00:00.032)       0:00:41.932 ******** 

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:189
Saturday 29 March 2025  18:25:28 -0400 (0:00:00.032)       0:00:41.965 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:197
Saturday 29 March 2025  18:25:28 -0400 (0:00:00.032)       0:00:41.998 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1743286813.1608264,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1716968941.893,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 135,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1716968586.525,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "1157759751",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:202
Saturday 29 March 2025  18:25:28 -0400 (0:00:00.361)       0:00:42.359 ******** 

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:224
Saturday 29 March 2025  18:25:28 -0400 (0:00:00.022)       0:00:42.382 ******** 
ok: [managed-node3]

TASK [Run the snapshot role to create snapshot LVs] ****************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:49
Saturday 29 March 2025  18:25:29 -0400 (0:00:00.804)       0:00:43.186 ******** 

TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:3
Saturday 29 March 2025  18:25:29 -0400 (0:00:00.047)       0:00:43.234 ******** 
included: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.snapshot : Ensure ansible_facts used by role] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:2
Saturday 29 March 2025  18:25:29 -0400 (0:00:00.035)       0:00:43.270 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Check if system is ostree] **********
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:10
Saturday 29 March 2025  18:25:29 -0400 (0:00:00.027)       0:00:43.297 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Set flag to indicate system is ostree] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:15
Saturday 29 March 2025  18:25:29 -0400 (0:00:00.024)       0:00:43.322 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:19
Saturday 29 March 2025  18:25:29 -0400 (0:00:00.023)       0:00:43.345 ******** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=CentOS_8.yml) => {
    "ansible_facts": {
        "__snapshot_packages": [
            "lvm2",
            "util-linux"
        ],
        "__snapshot_python": "/usr/libexec/platform-python"
    },
    "ansible_included_var_files": [
        "/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_8.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_8.yml"
}
ok: [managed-node3] => (item=CentOS_8.yml) => {
    "ansible_facts": {
        "__snapshot_packages": [
            "lvm2",
            "util-linux"
        ],
        "__snapshot_python": "/usr/libexec/platform-python"
    },
    "ansible_included_var_files": [
        "/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_8.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_8.yml"
}

TASK [fedora.linux_system_roles.snapshot : Ensure required packages are installed] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6
Saturday 29 March 2025  18:25:29 -0400 (0:00:00.060)       0:00:43.406 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: lvm2 util-linux

TASK [fedora.linux_system_roles.snapshot : Run snapshot module snapshot] *******
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:14
Saturday 29 March 2025  18:25:32 -0400 (0:00:02.870)       0:00:46.276 ******** 
changed: [managed-node3] => {
    "changed": true,
    "errors": "",
    "message": "",
    "return_code": 0
}

TASK [fedora.linux_system_roles.snapshot : Print out response] *****************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:40
Saturday 29 March 2025  18:25:36 -0400 (0:00:04.023)       0:00:50.300 ******** 
ok: [managed-node3] => {
    "snapshot_cmd": {
        "changed": true,
        "errors": "",
        "failed": false,
        "message": "",
        "msg": "",
        "return_code": 0
    }
}

TASK [fedora.linux_system_roles.snapshot : Set result] *************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:45
Saturday 29 March 2025  18:25:36 -0400 (0:00:00.049)       0:00:50.350 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "snapshot_cmd": {
            "changed": true,
            "errors": "",
            "failed": false,
            "message": "",
            "msg": "",
            "return_code": 0
        }
    },
    "changed": false
}

TASK [fedora.linux_system_roles.snapshot : Set snapshot_facts to the JSON results] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:49
Saturday 29 March 2025  18:25:36 -0400 (0:00:00.038)       0:00:50.389 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Show errors] ************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:54
Saturday 29 March 2025  18:25:37 -0400 (0:00:00.055)       0:00:50.445 ******** 
skipping: [managed-node3] => {}
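
The block above is the first full pass through the snapshot role for this test ("Run the snapshot role to create snapshot LVs", tests_mount.yml:49): platform vars resolve to CentOS_8.yml, lvm2/util-linux are already present, and the snapshot module reports changed=true with return_code 0. The invoking task itself is not echoed in the log; a plausible sketch using the role's documented variable names (treat the snapset name and the specific values as assumptions) is:

    - name: Run the snapshot role to create snapshot LVs
      vars:
        snapshot_lvm_action: snapshot
        snapshot_lvm_snapset_name: snapset1          # assumed name, not shown in this log
        snapshot_lvm_percent_space_required: 15      # assumed value
      include_role:
        name: fedora.linux_system_roles.snapshot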

TASK [Verify the snapshot LVs are created] *************************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:58
Saturday 29 March 2025  18:25:37 -0400 (0:00:00.039)       0:00:50.484 ******** 

TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:3
Saturday 29 March 2025  18:25:37 -0400 (0:00:00.054)       0:00:50.538 ******** 
included: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.snapshot : Ensure ansible_facts used by role] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:2
Saturday 29 March 2025  18:25:37 -0400 (0:00:00.041)       0:00:50.579 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Check if system is ostree] **********
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:10
Saturday 29 March 2025  18:25:37 -0400 (0:00:00.033)       0:00:50.613 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Set flag to indicate system is ostree] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:15
Saturday 29 March 2025  18:25:37 -0400 (0:00:00.025)       0:00:50.638 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:19
Saturday 29 March 2025  18:25:37 -0400 (0:00:00.030)       0:00:50.669 ******** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=CentOS_8.yml) => {
    "ansible_facts": {
        "__snapshot_packages": [
            "lvm2",
            "util-linux"
        ],
        "__snapshot_python": "/usr/libexec/platform-python"
    },
    "ansible_included_var_files": [
        "/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_8.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_8.yml"
}
ok: [managed-node3] => (item=CentOS_8.yml) => {
    "ansible_facts": {
        "__snapshot_packages": [
            "lvm2",
            "util-linux"
        ],
        "__snapshot_python": "/usr/libexec/platform-python"
    },
    "ansible_included_var_files": [
        "/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_8.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_8.yml"
}

TASK [fedora.linux_system_roles.snapshot : Ensure required packages are installed] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6
Saturday 29 March 2025  18:25:37 -0400 (0:00:00.060)       0:00:50.729 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: lvm2 util-linux

TASK [fedora.linux_system_roles.snapshot : Run snapshot module check] **********
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:14
Saturday 29 March 2025  18:25:40 -0400 (0:00:02.846)       0:00:53.576 ******** 
ok: [managed-node3] => {
    "changed": false,
    "errors": "",
    "message": "",
    "return_code": 0
}

TASK [fedora.linux_system_roles.snapshot : Print out response] *****************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:40
Saturday 29 March 2025  18:25:41 -0400 (0:00:01.778)       0:00:55.354 ******** 
ok: [managed-node3] => {
    "snapshot_cmd": {
        "changed": false,
        "errors": "",
        "failed": false,
        "message": "",
        "msg": "",
        "return_code": 0
    }
}

TASK [fedora.linux_system_roles.snapshot : Set result] *************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:45
Saturday 29 March 2025  18:25:41 -0400 (0:00:00.026)       0:00:55.381 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "snapshot_cmd": {
            "changed": false,
            "errors": "",
            "failed": false,
            "message": "",
            "msg": "",
            "return_code": 0
        }
    },
    "changed": false
}

TASK [fedora.linux_system_roles.snapshot : Set snapshot_facts to the JSON results] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:49
Saturday 29 March 2025  18:25:41 -0400 (0:00:00.026)       0:00:55.407 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Show errors] ************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:54
Saturday 29 March 2025  18:25:42 -0400 (0:00:00.052)       0:00:55.460 ******** 
skipping: [managed-node3] => {}
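
The pass above re-runs the role in verification mode ("Verify the snapshot LVs are created", tests_mount.yml:58); the module task is labelled "Run snapshot module check" and comes back ok (changed=false), which is what a successful verify-only run should look like. A hedged sketch of such a call, with variable names taken from the role's documented interface (assumptions, not copied from the test):

    - name: Verify the snapshot LVs are created
      vars:
        snapshot_lvm_action: check
        snapshot_lvm_snapset_name: snapset1          # assumed name
        snapshot_lvm_verify_only: true               # assumed flag for verify-only runs
      include_role:
        name: fedora.linux_system_roles.snapshot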

TASK [Mount the snapshot for lv1] **********************************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:67
Saturday 29 March 2025  18:25:42 -0400 (0:00:00.024)       0:00:55.485 ******** 

TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:3
Saturday 29 March 2025  18:25:42 -0400 (0:00:00.064)       0:00:55.549 ******** 
included: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.snapshot : Ensure ansible_facts used by role] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:2
Saturday 29 March 2025  18:25:42 -0400 (0:00:00.043)       0:00:55.592 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Check if system is ostree] **********
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:10
Saturday 29 March 2025  18:25:42 -0400 (0:00:00.028)       0:00:55.621 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Set flag to indicate system is ostree] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:15
Saturday 29 March 2025  18:25:42 -0400 (0:00:00.025)       0:00:55.646 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:19
Saturday 29 March 2025  18:25:42 -0400 (0:00:00.024)       0:00:55.671 ******** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=CentOS_8.yml) => {
    "ansible_facts": {
        "__snapshot_packages": [
            "lvm2",
            "util-linux"
        ],
        "__snapshot_python": "/usr/libexec/platform-python"
    },
    "ansible_included_var_files": [
        "/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_8.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_8.yml"
}
ok: [managed-node3] => (item=CentOS_8.yml) => {
    "ansible_facts": {
        "__snapshot_packages": [
            "lvm2",
            "util-linux"
        ],
        "__snapshot_python": "/usr/libexec/platform-python"
    },
    "ansible_included_var_files": [
        "/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_8.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_8.yml"
}

TASK [fedora.linux_system_roles.snapshot : Ensure required packages are installed] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6
Saturday 29 March 2025  18:25:42 -0400 (0:00:00.061)       0:00:55.732 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: lvm2 util-linux

TASK [fedora.linux_system_roles.snapshot : Run snapshot module mount] **********
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:14
Saturday 29 March 2025  18:25:45 -0400 (0:00:02.875)       0:00:58.608 ******** 
changed: [managed-node3] => {
    "changed": true,
    "errors": "",
    "message": "",
    "return_code": 0
}

TASK [fedora.linux_system_roles.snapshot : Print out response] *****************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:40
Saturday 29 March 2025  18:25:45 -0400 (0:00:00.597)       0:00:59.205 ******** 
ok: [managed-node3] => {
    "snapshot_cmd": {
        "changed": true,
        "errors": "",
        "failed": false,
        "message": "",
        "msg": "",
        "return_code": 0
    }
}

TASK [fedora.linux_system_roles.snapshot : Set result] *************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:45
Saturday 29 March 2025  18:25:45 -0400 (0:00:00.027)       0:00:59.233 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "snapshot_cmd": {
            "changed": true,
            "errors": "",
            "failed": false,
            "message": "",
            "msg": "",
            "return_code": 0
        }
    },
    "changed": false
}

TASK [fedora.linux_system_roles.snapshot : Set snapshot_facts to the JSON results] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:49
Saturday 29 March 2025  18:25:45 -0400 (0:00:00.025)       0:00:59.258 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Show errors] ************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:54
Saturday 29 March 2025  18:25:45 -0400 (0:00:00.024)       0:00:59.283 ******** 
skipping: [managed-node3] => {}

TASK [Assert changes for mount] ************************************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:78
Saturday 29 March 2025  18:25:45 -0400 (0:00:00.023)       0:00:59.306 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed
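
Here the role runs with the mount action against a single LV ("Mount the snapshot for lv1", tests_mount.yml:67) and the follow-up assert confirms the mount reported a change. A hedged sketch of the invoking task (the mountpoint and snapset name are hypothetical placeholders; only the action and the test_vg1/lv1 pairing are grounded in this log):

    - name: Mount the snapshot for lv1
      vars:
        snapshot_lvm_action: mount
        snapshot_lvm_vg: test_vg1
        snapshot_lvm_lv: lv1
        snapshot_lvm_snapset_name: snapset1          # hypothetical
        snapshot_lvm_mountpoint: /mnt/lv1_mnt        # hypothetical
        snapshot_lvm_mountpoint_create: true
      include_role:
        name: fedora.linux_system_roles.snapshot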

TASK [Mount the snapshot for lv2] **********************************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:82
Saturday 29 March 2025  18:25:45 -0400 (0:00:00.026)       0:00:59.333 ******** 

TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:3
Saturday 29 March 2025  18:25:45 -0400 (0:00:00.062)       0:00:59.395 ******** 
included: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.snapshot : Ensure ansible_facts used by role] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:2
Saturday 29 March 2025  18:25:46 -0400 (0:00:00.047)       0:00:59.443 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Check if system is ostree] **********
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:10
Saturday 29 March 2025  18:25:46 -0400 (0:00:00.113)       0:00:59.556 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Set flag to indicate system is ostree] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:15
Saturday 29 March 2025  18:25:46 -0400 (0:00:00.030)       0:00:59.586 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:19
Saturday 29 March 2025  18:25:46 -0400 (0:00:00.027)       0:00:59.614 ******** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=CentOS_8.yml) => {
    "ansible_facts": {
        "__snapshot_packages": [
            "lvm2",
            "util-linux"
        ],
        "__snapshot_python": "/usr/libexec/platform-python"
    },
    "ansible_included_var_files": [
        "/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_8.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_8.yml"
}
ok: [managed-node3] => (item=CentOS_8.yml) => {
    "ansible_facts": {
        "__snapshot_packages": [
            "lvm2",
            "util-linux"
        ],
        "__snapshot_python": "/usr/libexec/platform-python"
    },
    "ansible_included_var_files": [
        "/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_8.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_8.yml"
}

TASK [fedora.linux_system_roles.snapshot : Ensure required packages are installed] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6
Saturday 29 March 2025  18:25:46 -0400 (0:00:00.069)       0:00:59.684 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: lvm2 util-linux

TASK [fedora.linux_system_roles.snapshot : Run snapshot module mount] **********
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:14
Saturday 29 March 2025  18:25:49 -0400 (0:00:02.877)       0:01:02.561 ******** 
changed: [managed-node3] => {
    "changed": true,
    "errors": "",
    "message": "",
    "return_code": 0
}

TASK [fedora.linux_system_roles.snapshot : Print out response] *****************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:40
Saturday 29 March 2025  18:25:49 -0400 (0:00:00.591)       0:01:03.152 ******** 
ok: [managed-node3] => {
    "snapshot_cmd": {
        "changed": true,
        "errors": "",
        "failed": false,
        "message": "",
        "msg": "",
        "return_code": 0
    }
}

TASK [fedora.linux_system_roles.snapshot : Set result] *************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:45
Saturday 29 March 2025  18:25:49 -0400 (0:00:00.025)       0:01:03.178 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "snapshot_cmd": {
            "changed": true,
            "errors": "",
            "failed": false,
            "message": "",
            "msg": "",
            "return_code": 0
        }
    },
    "changed": false
}

TASK [fedora.linux_system_roles.snapshot : Set snapshot_facts to the JSON results] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:49
Saturday 29 March 2025  18:25:49 -0400 (0:00:00.025)       0:01:03.203 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Show errors] ************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:54
Saturday 29 March 2025  18:25:49 -0400 (0:00:00.024)       0:01:03.228 ******** 
skipping: [managed-node3] => {}

TASK [Mount the snapshot for lv7] **********************************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:93
Saturday 29 March 2025  18:25:49 -0400 (0:00:00.023)       0:01:03.252 ******** 

TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:3
Saturday 29 March 2025  18:25:49 -0400 (0:00:00.062)       0:01:03.314 ******** 
included: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.snapshot : Ensure ansible_facts used by role] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:2
Saturday 29 March 2025  18:25:49 -0400 (0:00:00.034)       0:01:03.349 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Check if system is ostree] **********
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:10
Saturday 29 March 2025  18:25:49 -0400 (0:00:00.027)       0:01:03.377 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Set flag to indicate system is ostree] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:15
Saturday 29 March 2025  18:25:49 -0400 (0:00:00.024)       0:01:03.401 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:19
Saturday 29 March 2025  18:25:49 -0400 (0:00:00.022)       0:01:03.424 ******** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=CentOS_8.yml) => {
    "ansible_facts": {
        "__snapshot_packages": [
            "lvm2",
            "util-linux"
        ],
        "__snapshot_python": "/usr/libexec/platform-python"
    },
    "ansible_included_var_files": [
        "/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_8.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_8.yml"
}
ok: [managed-node3] => (item=CentOS_8.yml) => {
    "ansible_facts": {
        "__snapshot_packages": [
            "lvm2",
            "util-linux"
        ],
        "__snapshot_python": "/usr/libexec/platform-python"
    },
    "ansible_included_var_files": [
        "/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_8.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_8.yml"
}

TASK [fedora.linux_system_roles.snapshot : Ensure required packages are installed] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6
Saturday 29 March 2025  18:25:50 -0400 (0:00:00.059)       0:01:03.484 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: lvm2 util-linux

TASK [fedora.linux_system_roles.snapshot : Run snapshot module mount] **********
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:14
Saturday 29 March 2025  18:25:52 -0400 (0:00:02.849)       0:01:06.334 ******** 
changed: [managed-node3] => {
    "changed": true,
    "errors": "",
    "message": "",
    "return_code": 0
}

TASK [fedora.linux_system_roles.snapshot : Print out response] *****************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:40
Saturday 29 March 2025  18:25:53 -0400 (0:00:00.581)       0:01:06.915 ******** 
ok: [managed-node3] => {
    "snapshot_cmd": {
        "changed": true,
        "errors": "",
        "failed": false,
        "message": "",
        "msg": "",
        "return_code": 0
    }
}

TASK [fedora.linux_system_roles.snapshot : Set result] *************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:45
Saturday 29 March 2025  18:25:53 -0400 (0:00:00.026)       0:01:06.941 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "snapshot_cmd": {
            "changed": true,
            "errors": "",
            "failed": false,
            "message": "",
            "msg": "",
            "return_code": 0
        }
    },
    "changed": false
}

TASK [fedora.linux_system_roles.snapshot : Set snapshot_facts to the JSON results] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:49
Saturday 29 March 2025  18:25:53 -0400 (0:00:00.025)       0:01:06.967 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Show errors] ************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:54
Saturday 29 March 2025  18:25:53 -0400 (0:00:00.067)       0:01:07.034 ******** 
skipping: [managed-node3] => {}

TASK [Mount the origin for lv6] ************************************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:104
Saturday 29 March 2025  18:25:53 -0400 (0:00:00.024)       0:01:07.058 ******** 
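
This step mounts the origin LV rather than its snapshot. Assuming the role's documented snapshot_lvm_mount_origin flag, the invocation at tests_mount.yml:104 presumably looks roughly like this (names and paths are hypothetical):

    - name: Mount the origin for lv6
      include_role:
        name: fedora.linux_system_roles.snapshot
      vars:
        snapshot_lvm_action: mount
        snapshot_lvm_vg: test_vg3              # assumption: VG created during setup
        snapshot_lvm_lv: lv6                   # assumption: LV whose origin is mounted
        snapshot_lvm_snapset_name: snapset1    # assumption: snapshot set name
        snapshot_lvm_mountpoint: /mnt/lv6_mnt  # assumption: target mountpoint
        snapshot_lvm_mount_origin: true        # mount the origin LV, not the snapshot
        snapshot_lvm_mountpoint_create: true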

TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:3
Saturday 29 March 2025  18:25:53 -0400 (0:00:00.066)       0:01:07.124 ******** 
included: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.snapshot : Ensure ansible_facts used by role] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:2
Saturday 29 March 2025  18:25:53 -0400 (0:00:00.034)       0:01:07.159 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Check if system is ostree] **********
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:10
Saturday 29 March 2025  18:25:53 -0400 (0:00:00.029)       0:01:07.189 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Set flag to indicate system is ostree] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:15
Saturday 29 March 2025  18:25:53 -0400 (0:00:00.025)       0:01:07.214 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:19
Saturday 29 March 2025  18:25:53 -0400 (0:00:00.024)       0:01:07.238 ******** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=CentOS_8.yml) => {
    "ansible_facts": {
        "__snapshot_packages": [
            "lvm2",
            "util-linux"
        ],
        "__snapshot_python": "/usr/libexec/platform-python"
    },
    "ansible_included_var_files": [
        "/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_8.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_8.yml"
}
ok: [managed-node3] => (item=CentOS_8.yml) => {
    "ansible_facts": {
        "__snapshot_packages": [
            "lvm2",
            "util-linux"
        ],
        "__snapshot_python": "/usr/libexec/platform-python"
    },
    "ansible_included_var_files": [
        "/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_8.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_8.yml"
}

TASK [fedora.linux_system_roles.snapshot : Ensure required packages are installed] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6
Saturday 29 March 2025  18:25:53 -0400 (0:00:00.061)       0:01:07.300 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: lvm2 util-linux

TASK [fedora.linux_system_roles.snapshot : Run snapshot module mount] **********
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:14
Saturday 29 March 2025  18:25:56 -0400 (0:00:02.866)       0:01:10.167 ******** 
changed: [managed-node3] => {
    "changed": true,
    "errors": "",
    "message": "",
    "return_code": 0
}

TASK [fedora.linux_system_roles.snapshot : Print out response] *****************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:40
Saturday 29 March 2025  18:25:57 -0400 (0:00:00.593)       0:01:10.760 ******** 
ok: [managed-node3] => {
    "snapshot_cmd": {
        "changed": true,
        "errors": "",
        "failed": false,
        "message": "",
        "msg": "",
        "return_code": 0
    }
}

TASK [fedora.linux_system_roles.snapshot : Set result] *************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:45
Saturday 29 March 2025  18:25:57 -0400 (0:00:00.027)       0:01:10.787 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "snapshot_cmd": {
            "changed": true,
            "errors": "",
            "failed": false,
            "message": "",
            "msg": "",
            "return_code": 0
        }
    },
    "changed": false
}

TASK [fedora.linux_system_roles.snapshot : Set snapshot_facts to the JSON results] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:49
Saturday 29 March 2025  18:25:57 -0400 (0:00:00.033)       0:01:10.821 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Show errors] ************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:54
Saturday 29 March 2025  18:25:57 -0400 (0:00:00.023)       0:01:10.844 ******** 
skipping: [managed-node3] => {}

TASK [Mount the snapshot for lv1 again for idempotence] ************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:116
Saturday 29 March 2025  18:25:57 -0400 (0:00:00.027)       0:01:10.872 ******** 
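
This step repeats the earlier mount of lv1 with identical parameters to exercise idempotence: because the snapshot is already mounted at the requested mountpoint, the snapshot module is expected to report ok with changed: false, which the "Assert no changes for mount" task further below then verifies. A sketch of such an assertion (the actual condition at tests_mount.yml:127 may differ):

    - name: Assert no changes for mount
      assert:
        that:
          - not snapshot_cmd.changed   # assumption: the re-mount must be a no-op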

TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:3
Saturday 29 March 2025  18:25:57 -0400 (0:00:00.063)       0:01:10.936 ******** 
included: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.snapshot : Ensure ansible_facts used by role] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:2
Saturday 29 March 2025  18:25:57 -0400 (0:00:00.065)       0:01:11.002 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Check if system is ostree] **********
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:10
Saturday 29 March 2025  18:25:57 -0400 (0:00:00.027)       0:01:11.029 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Set flag to indicate system is ostree] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:15
Saturday 29 March 2025  18:25:57 -0400 (0:00:00.027)       0:01:11.057 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:19
Saturday 29 March 2025  18:25:57 -0400 (0:00:00.025)       0:01:11.082 ******** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=CentOS_8.yml) => {
    "ansible_facts": {
        "__snapshot_packages": [
            "lvm2",
            "util-linux"
        ],
        "__snapshot_python": "/usr/libexec/platform-python"
    },
    "ansible_included_var_files": [
        "/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_8.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_8.yml"
}
ok: [managed-node3] => (item=CentOS_8.yml) => {
    "ansible_facts": {
        "__snapshot_packages": [
            "lvm2",
            "util-linux"
        ],
        "__snapshot_python": "/usr/libexec/platform-python"
    },
    "ansible_included_var_files": [
        "/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_8.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_8.yml"
}

TASK [fedora.linux_system_roles.snapshot : Ensure required packages are installed] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6
Saturday 29 March 2025  18:25:57 -0400 (0:00:00.074)       0:01:11.157 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: lvm2 util-linux

TASK [fedora.linux_system_roles.snapshot : Run snapshot module mount] **********
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:14
Saturday 29 March 2025  18:26:00 -0400 (0:00:02.888)       0:01:14.045 ******** 
ok: [managed-node3] => {
    "changed": false,
    "errors": "",
    "message": "",
    "return_code": 0
}

TASK [fedora.linux_system_roles.snapshot : Print out response] *****************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:40
Saturday 29 March 2025  18:26:01 -0400 (0:00:00.585)       0:01:14.631 ******** 
ok: [managed-node3] => {
    "snapshot_cmd": {
        "changed": false,
        "errors": "",
        "failed": false,
        "message": "",
        "msg": "",
        "return_code": 0
    }
}

TASK [fedora.linux_system_roles.snapshot : Set result] *************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:45
Saturday 29 March 2025  18:26:01 -0400 (0:00:00.032)       0:01:14.663 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "snapshot_cmd": {
            "changed": false,
            "errors": "",
            "failed": false,
            "message": "",
            "msg": "",
            "return_code": 0
        }
    },
    "changed": false
}

TASK [fedora.linux_system_roles.snapshot : Set snapshot_facts to the JSON results] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:49
Saturday 29 March 2025  18:26:01 -0400 (0:00:00.026)       0:01:14.689 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Show errors] ************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:54
Saturday 29 March 2025  18:26:01 -0400 (0:00:00.026)       0:01:14.716 ******** 
skipping: [managed-node3] => {}

TASK [Assert no changes for mount] *********************************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:127
Saturday 29 March 2025  18:26:01 -0400 (0:00:00.024)       0:01:14.740 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Umount the snapshot for lv1] *********************************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:131
Saturday 29 March 2025  18:26:01 -0400 (0:00:00.026)       0:01:14.767 ******** 
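
Here the test unmounts the snapshot it mounted earlier. A minimal sketch of an umount invocation, reusing the same hypothetical names as above:

    - name: Umount the snapshot for lv1
      include_role:
        name: fedora.linux_system_roles.snapshot
      vars:
        snapshot_lvm_action: umount
        snapshot_lvm_vg: test_vg1              # assumption: VG created during setup
        snapshot_lvm_lv: lv1                   # assumption: LV whose snapshot is unmounted
        snapshot_lvm_snapset_name: snapset1    # assumption: snapshot set name
        snapshot_lvm_mountpoint: /mnt/lv1_mnt  # assumption: mountpoint to unmount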

TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:3
Saturday 29 March 2025  18:26:01 -0400 (0:00:00.072)       0:01:14.839 ******** 
included: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.snapshot : Ensure ansible_facts used by role] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:2
Saturday 29 March 2025  18:26:01 -0400 (0:00:00.035)       0:01:14.875 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Check if system is ostree] **********
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:10
Saturday 29 March 2025  18:26:01 -0400 (0:00:00.029)       0:01:14.904 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Set flag to indicate system is ostree] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:15
Saturday 29 March 2025  18:26:01 -0400 (0:00:00.024)       0:01:14.928 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:19
Saturday 29 March 2025  18:26:01 -0400 (0:00:00.024)       0:01:14.953 ******** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=CentOS_8.yml) => {
    "ansible_facts": {
        "__snapshot_packages": [
            "lvm2",
            "util-linux"
        ],
        "__snapshot_python": "/usr/libexec/platform-python"
    },
    "ansible_included_var_files": [
        "/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_8.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_8.yml"
}
ok: [managed-node3] => (item=CentOS_8.yml) => {
    "ansible_facts": {
        "__snapshot_packages": [
            "lvm2",
            "util-linux"
        ],
        "__snapshot_python": "/usr/libexec/platform-python"
    },
    "ansible_included_var_files": [
        "/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_8.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_8.yml"
}

TASK [fedora.linux_system_roles.snapshot : Ensure required packages are installed] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6
Saturday 29 March 2025  18:26:01 -0400 (0:00:00.101)       0:01:15.054 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: lvm2 util-linux

TASK [fedora.linux_system_roles.snapshot : Run snapshot module umount] *********
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:14
Saturday 29 March 2025  18:26:04 -0400 (0:00:02.911)       0:01:17.965 ******** 
changed: [managed-node3] => {
    "changed": true,
    "errors": "",
    "message": "",
    "return_code": 0
}

TASK [fedora.linux_system_roles.snapshot : Print out response] *****************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:40
Saturday 29 March 2025  18:26:05 -0400 (0:00:00.589)       0:01:18.555 ******** 
ok: [managed-node3] => {
    "snapshot_cmd": {
        "changed": true,
        "errors": "",
        "failed": false,
        "message": "",
        "msg": "",
        "return_code": 0
    }
}

TASK [fedora.linux_system_roles.snapshot : Set result] *************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:45
Saturday 29 March 2025  18:26:05 -0400 (0:00:00.025)       0:01:18.580 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "snapshot_cmd": {
            "changed": true,
            "errors": "",
            "failed": false,
            "message": "",
            "msg": "",
            "return_code": 0
        }
    },
    "changed": false
}

TASK [fedora.linux_system_roles.snapshot : Set snapshot_facts to the JSON results] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:49
Saturday 29 March 2025  18:26:05 -0400 (0:00:00.028)       0:01:18.608 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Show errors] ************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:54
Saturday 29 March 2025  18:26:05 -0400 (0:00:00.023)       0:01:18.632 ******** 
skipping: [managed-node3] => {}

TASK [Assert changes for umount] ***********************************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:141
Saturday 29 March 2025  18:26:05 -0400 (0:00:00.023)       0:01:18.655 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Umount again to check idempotence] ***************************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:145
Saturday 29 March 2025  18:26:05 -0400 (0:00:00.026)       0:01:18.682 ******** 
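
As with the mount case, the umount is repeated with the same parameters; since the mountpoint is already unmounted, the module is expected to return ok with changed: false, and the "Assert no changes for umount" task below presumably checks exactly that, e.g.:

    - name: Assert no changes for umount
      assert:
        that:
          - not snapshot_cmd.changed   # assumption: the repeated umount must be a no-op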

TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:3
Saturday 29 March 2025  18:26:05 -0400 (0:00:00.071)       0:01:18.754 ******** 
included: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.snapshot : Ensure ansible_facts used by role] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:2
Saturday 29 March 2025  18:26:05 -0400 (0:00:00.036)       0:01:18.790 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Check if system is ostree] **********
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:10
Saturday 29 March 2025  18:26:05 -0400 (0:00:00.028)       0:01:18.819 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Set flag to indicate system is ostree] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:15
Saturday 29 March 2025  18:26:05 -0400 (0:00:00.024)       0:01:18.844 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:19
Saturday 29 March 2025  18:26:05 -0400 (0:00:00.025)       0:01:18.869 ******** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=CentOS_8.yml) => {
    "ansible_facts": {
        "__snapshot_packages": [
            "lvm2",
            "util-linux"
        ],
        "__snapshot_python": "/usr/libexec/platform-python"
    },
    "ansible_included_var_files": [
        "/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_8.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_8.yml"
}
ok: [managed-node3] => (item=CentOS_8.yml) => {
    "ansible_facts": {
        "__snapshot_packages": [
            "lvm2",
            "util-linux"
        ],
        "__snapshot_python": "/usr/libexec/platform-python"
    },
    "ansible_included_var_files": [
        "/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_8.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_8.yml"
}

TASK [fedora.linux_system_roles.snapshot : Ensure required packages are installed] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6
Saturday 29 March 2025  18:26:05 -0400 (0:00:00.062)       0:01:18.932 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: lvm2 util-linux

TASK [fedora.linux_system_roles.snapshot : Run snapshot module umount] *********
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:14
Saturday 29 March 2025  18:26:08 -0400 (0:00:02.966)       0:01:21.898 ******** 
ok: [managed-node3] => {
    "changed": false,
    "errors": "",
    "message": "",
    "return_code": 0
}

TASK [fedora.linux_system_roles.snapshot : Print out response] *****************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:40
Saturday 29 March 2025  18:26:08 -0400 (0:00:00.517)       0:01:22.416 ******** 
ok: [managed-node3] => {
    "snapshot_cmd": {
        "changed": false,
        "errors": "",
        "failed": false,
        "message": "",
        "msg": "",
        "return_code": 0
    }
}

TASK [fedora.linux_system_roles.snapshot : Set result] *************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:45
Saturday 29 March 2025  18:26:09 -0400 (0:00:00.026)       0:01:22.442 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "snapshot_cmd": {
            "changed": false,
            "errors": "",
            "failed": false,
            "message": "",
            "msg": "",
            "return_code": 0
        }
    },
    "changed": false
}

TASK [fedora.linux_system_roles.snapshot : Set snapshot_facts to the JSON results] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:49
Saturday 29 March 2025  18:26:09 -0400 (0:00:00.036)       0:01:22.479 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Show errors] ************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:54
Saturday 29 March 2025  18:26:09 -0400 (0:00:00.031)       0:01:22.510 ******** 
skipping: [managed-node3] => {}

TASK [Assert no changes for umount] ********************************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:155
Saturday 29 March 2025  18:26:09 -0400 (0:00:00.025)       0:01:22.536 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Umount the snapshot for lv2] *********************************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:159
Saturday 29 March 2025  18:26:09 -0400 (0:00:00.025)       0:01:22.562 ******** 

TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:3
Saturday 29 March 2025  18:26:09 -0400 (0:00:00.081)       0:01:22.643 ******** 
included: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.snapshot : Ensure ansible_facts used by role] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:2
Saturday 29 March 2025  18:26:09 -0400 (0:00:00.035)       0:01:22.679 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Check if system is ostree] **********
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:10
Saturday 29 March 2025  18:26:09 -0400 (0:00:00.028)       0:01:22.708 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Set flag to indicate system is ostree] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:15
Saturday 29 March 2025  18:26:09 -0400 (0:00:00.024)       0:01:22.732 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:19
Saturday 29 March 2025  18:26:09 -0400 (0:00:00.023)       0:01:22.755 ******** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=CentOS_8.yml) => {
    "ansible_facts": {
        "__snapshot_packages": [
            "lvm2",
            "util-linux"
        ],
        "__snapshot_python": "/usr/libexec/platform-python"
    },
    "ansible_included_var_files": [
        "/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_8.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_8.yml"
}
ok: [managed-node3] => (item=CentOS_8.yml) => {
    "ansible_facts": {
        "__snapshot_packages": [
            "lvm2",
            "util-linux"
        ],
        "__snapshot_python": "/usr/libexec/platform-python"
    },
    "ansible_included_var_files": [
        "/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_8.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_8.yml"
}

TASK [fedora.linux_system_roles.snapshot : Ensure required packages are installed] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6
Saturday 29 March 2025  18:26:09 -0400 (0:00:00.091)       0:01:22.847 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: lvm2 util-linux

TASK [fedora.linux_system_roles.snapshot : Run snapshot module umount] *********
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:14
Saturday 29 March 2025  18:26:12 -0400 (0:00:02.866)       0:01:25.713 ******** 
changed: [managed-node3] => {
    "changed": true,
    "errors": "",
    "message": "",
    "return_code": 0
}

TASK [fedora.linux_system_roles.snapshot : Print out response] *****************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:40
Saturday 29 March 2025  18:26:12 -0400 (0:00:00.608)       0:01:26.322 ******** 
ok: [managed-node3] => {
    "snapshot_cmd": {
        "changed": true,
        "errors": "",
        "failed": false,
        "message": "",
        "msg": "",
        "return_code": 0
    }
}

TASK [fedora.linux_system_roles.snapshot : Set result] *************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:45
Saturday 29 March 2025  18:26:12 -0400 (0:00:00.046)       0:01:26.368 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "snapshot_cmd": {
            "changed": true,
            "errors": "",
            "failed": false,
            "message": "",
            "msg": "",
            "return_code": 0
        }
    },
    "changed": false
}

TASK [fedora.linux_system_roles.snapshot : Set snapshot_facts to the JSON results] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:49
Saturday 29 March 2025  18:26:12 -0400 (0:00:00.049)       0:01:26.418 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Show errors] ************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:54
Saturday 29 March 2025  18:26:13 -0400 (0:00:00.042)       0:01:26.460 ******** 
skipping: [managed-node3] => {}

TASK [Umount the snapshot for lv7] *********************************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:169
Saturday 29 March 2025  18:26:13 -0400 (0:00:00.042)       0:01:26.503 ******** 

TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:3
Saturday 29 March 2025  18:26:13 -0400 (0:00:00.133)       0:01:26.637 ******** 
included: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.snapshot : Ensure ansible_facts used by role] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:2
Saturday 29 March 2025  18:26:13 -0400 (0:00:00.060)       0:01:26.698 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Check if system is ostree] **********
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:10
Saturday 29 March 2025  18:26:13 -0400 (0:00:00.032)       0:01:26.730 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Set flag to indicate system is ostree] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:15
Saturday 29 March 2025  18:26:13 -0400 (0:00:00.024)       0:01:26.754 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:19
Saturday 29 March 2025  18:26:13 -0400 (0:00:00.024)       0:01:26.779 ******** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=CentOS_8.yml) => {
    "ansible_facts": {
        "__snapshot_packages": [
            "lvm2",
            "util-linux"
        ],
        "__snapshot_python": "/usr/libexec/platform-python"
    },
    "ansible_included_var_files": [
        "/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_8.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_8.yml"
}
ok: [managed-node3] => (item=CentOS_8.yml) => {
    "ansible_facts": {
        "__snapshot_packages": [
            "lvm2",
            "util-linux"
        ],
        "__snapshot_python": "/usr/libexec/platform-python"
    },
    "ansible_included_var_files": [
        "/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_8.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_8.yml"
}

TASK [fedora.linux_system_roles.snapshot : Ensure required packages are installed] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6
Saturday 29 March 2025  18:26:13 -0400 (0:00:00.108)       0:01:26.888 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: lvm2 util-linux

TASK [fedora.linux_system_roles.snapshot : Run snapshot module umount] *********
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:14
Saturday 29 March 2025  18:26:16 -0400 (0:00:02.919)       0:01:29.807 ******** 
changed: [managed-node3] => {
    "changed": true,
    "errors": "",
    "message": "",
    "return_code": 0
}

TASK [fedora.linux_system_roles.snapshot : Print out response] *****************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:40
Saturday 29 March 2025  18:26:17 -0400 (0:00:00.627)       0:01:30.434 ******** 
ok: [managed-node3] => {
    "snapshot_cmd": {
        "changed": true,
        "errors": "",
        "failed": false,
        "message": "",
        "msg": "",
        "return_code": 0
    }
}

TASK [fedora.linux_system_roles.snapshot : Set result] *************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:45
Saturday 29 March 2025  18:26:17 -0400 (0:00:00.053)       0:01:30.488 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "snapshot_cmd": {
            "changed": true,
            "errors": "",
            "failed": false,
            "message": "",
            "msg": "",
            "return_code": 0
        }
    },
    "changed": false
}

TASK [fedora.linux_system_roles.snapshot : Set snapshot_facts to the JSON results] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:49
Saturday 29 March 2025  18:26:17 -0400 (0:00:00.048)       0:01:30.537 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Show errors] ************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:54
Saturday 29 March 2025  18:26:17 -0400 (0:00:00.038)       0:01:30.575 ******** 
skipping: [managed-node3] => {}

TASK [Umount the origin for lv6] ***********************************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:179
Saturday 29 March 2025  18:26:17 -0400 (0:00:00.037)       0:01:30.612 ******** 

TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:3
Saturday 29 March 2025  18:26:17 -0400 (0:00:00.109)       0:01:30.721 ******** 
included: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.snapshot : Ensure ansible_facts used by role] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:2
Saturday 29 March 2025  18:26:17 -0400 (0:00:00.043)       0:01:30.764 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Check if system is ostree] **********
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:10
Saturday 29 March 2025  18:26:17 -0400 (0:00:00.041)       0:01:30.806 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Set flag to indicate system is ostree] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:15
Saturday 29 March 2025  18:26:17 -0400 (0:00:00.042)       0:01:30.849 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:19
Saturday 29 March 2025  18:26:17 -0400 (0:00:00.068)       0:01:30.917 ******** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=CentOS_8.yml) => {
    "ansible_facts": {
        "__snapshot_packages": [
            "lvm2",
            "util-linux"
        ],
        "__snapshot_python": "/usr/libexec/platform-python"
    },
    "ansible_included_var_files": [
        "/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_8.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_8.yml"
}
ok: [managed-node3] => (item=CentOS_8.yml) => {
    "ansible_facts": {
        "__snapshot_packages": [
            "lvm2",
            "util-linux"
        ],
        "__snapshot_python": "/usr/libexec/platform-python"
    },
    "ansible_included_var_files": [
        "/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_8.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_8.yml"
}

TASK [fedora.linux_system_roles.snapshot : Ensure required packages are installed] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6
Saturday 29 March 2025  18:26:17 -0400 (0:00:00.141)       0:01:31.059 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: lvm2 util-linux

TASK [fedora.linux_system_roles.snapshot : Run snapshot module umount] *********
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:14
Saturday 29 March 2025  18:26:20 -0400 (0:00:03.261)       0:01:34.320 ******** 
changed: [managed-node3] => {
    "changed": true,
    "errors": "",
    "message": "",
    "return_code": 0
}

TASK [fedora.linux_system_roles.snapshot : Print out response] *****************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:40
Saturday 29 March 2025  18:26:21 -0400 (0:00:00.587)       0:01:34.908 ******** 
ok: [managed-node3] => {
    "snapshot_cmd": {
        "changed": true,
        "errors": "",
        "failed": false,
        "message": "",
        "msg": "",
        "return_code": 0
    }
}

TASK [fedora.linux_system_roles.snapshot : Set result] *************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:45
Saturday 29 March 2025  18:26:21 -0400 (0:00:00.030)       0:01:34.939 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "snapshot_cmd": {
            "changed": true,
            "errors": "",
            "failed": false,
            "message": "",
            "msg": "",
            "return_code": 0
        }
    },
    "changed": false
}

TASK [fedora.linux_system_roles.snapshot : Set snapshot_facts to the JSON results] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:49
Saturday 29 March 2025  18:26:21 -0400 (0:00:00.026)       0:01:34.965 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Show errors] ************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:54
Saturday 29 March 2025  18:26:21 -0400 (0:00:00.025)       0:01:34.990 ******** 
skipping: [managed-node3] => {}

TASK [Run the snapshot role remove the snapshot LVs] ***************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:189
Saturday 29 March 2025  18:26:21 -0400 (0:00:00.023)       0:01:35.014 ******** 
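
This step switches the role to remove mode to delete the snapshot LVs created earlier in the test. A sketch under the assumption that the test removes the whole snapshot set by name:

    - name: Run the snapshot role remove the snapshot LVs
      include_role:
        name: fedora.linux_system_roles.snapshot
      vars:
        snapshot_lvm_action: remove
        snapshot_lvm_snapset_name: snapset1   # assumption: same snapset name used throughout
        snapshot_lvm_all_vgs: true            # assumption: act on every VG carrying the snapset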

TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:3
Saturday 29 March 2025  18:26:21 -0400 (0:00:00.101)       0:01:35.116 ******** 
included: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.snapshot : Ensure ansible_facts used by role] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:2
Saturday 29 March 2025  18:26:21 -0400 (0:00:00.056)       0:01:35.172 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Check if system is ostree] **********
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:10
Saturday 29 March 2025  18:26:21 -0400 (0:00:00.043)       0:01:35.216 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Set flag to indicate system is ostree] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:15
Saturday 29 March 2025  18:26:21 -0400 (0:00:00.039)       0:01:35.255 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:19
Saturday 29 March 2025  18:26:21 -0400 (0:00:00.036)       0:01:35.291 ******** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=CentOS_8.yml) => {
    "ansible_facts": {
        "__snapshot_packages": [
            "lvm2",
            "util-linux"
        ],
        "__snapshot_python": "/usr/libexec/platform-python"
    },
    "ansible_included_var_files": [
        "/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_8.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_8.yml"
}
ok: [managed-node3] => (item=CentOS_8.yml) => {
    "ansible_facts": {
        "__snapshot_packages": [
            "lvm2",
            "util-linux"
        ],
        "__snapshot_python": "/usr/libexec/platform-python"
    },
    "ansible_included_var_files": [
        "/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_8.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_8.yml"
}

TASK [fedora.linux_system_roles.snapshot : Ensure required packages are installed] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6
Saturday 29 March 2025  18:26:21 -0400 (0:00:00.087)       0:01:35.378 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: lvm2 util-linux

TASK [fedora.linux_system_roles.snapshot : Run snapshot module remove] *********
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:14
Saturday 29 March 2025  18:26:24 -0400 (0:00:03.012)       0:01:38.391 ******** 
changed: [managed-node3] => {
    "changed": true,
    "errors": "",
    "message": "",
    "return_code": 0
}

TASK [fedora.linux_system_roles.snapshot : Print out response] *****************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:40
Saturday 29 March 2025  18:26:27 -0400 (0:00:02.909)       0:01:41.300 ******** 
ok: [managed-node3] => {
    "snapshot_cmd": {
        "changed": true,
        "errors": "",
        "failed": false,
        "message": "",
        "msg": "",
        "return_code": 0
    }
}

TASK [fedora.linux_system_roles.snapshot : Set result] *************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:45
Saturday 29 March 2025  18:26:27 -0400 (0:00:00.029)       0:01:41.330 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "snapshot_cmd": {
            "changed": true,
            "errors": "",
            "failed": false,
            "message": "",
            "msg": "",
            "return_code": 0
        }
    },
    "changed": false
}

TASK [fedora.linux_system_roles.snapshot : Set snapshot_facts to the JSON results] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:49
Saturday 29 March 2025  18:26:27 -0400 (0:00:00.035)       0:01:41.365 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Show errors] ************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:54
Saturday 29 March 2025  18:26:27 -0400 (0:00:00.028)       0:01:41.394 ******** 
skipping: [managed-node3] => {}

TASK [Use the snapshot_lvm_verify option to make sure remove is done] **********
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:196
Saturday 29 March 2025  18:26:27 -0400 (0:00:00.022)       0:01:41.416 ******** 
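
The verify pass re-runs the remove action with snapshot_lvm_verify_only enabled, so the role only checks that the snapshot LVs are gone and makes no changes; that is why the module result below is ok with changed: false. Sketched invocation (variable values are assumptions):

    - name: Use the snapshot_lvm_verify option to make sure remove is done
      include_role:
        name: fedora.linux_system_roles.snapshot
      vars:
        snapshot_lvm_action: remove
        snapshot_lvm_snapset_name: snapset1   # assumption: snapshot set name
        snapshot_lvm_all_vgs: true            # assumption: check every VG
        snapshot_lvm_verify_only: true        # verify only; do not modify anything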

TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:3
Saturday 29 March 2025  18:26:28 -0400 (0:00:00.098)       0:01:41.515 ******** 
included: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.snapshot : Ensure ansible_facts used by role] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:2
Saturday 29 March 2025  18:26:28 -0400 (0:00:00.035)       0:01:41.551 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Check if system is ostree] **********
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:10
Saturday 29 March 2025  18:26:28 -0400 (0:00:00.027)       0:01:41.579 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Set flag to indicate system is ostree] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:15
Saturday 29 March 2025  18:26:28 -0400 (0:00:00.023)       0:01:41.603 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:19
Saturday 29 March 2025  18:26:28 -0400 (0:00:00.025)       0:01:41.628 ******** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=CentOS_8.yml) => {
    "ansible_facts": {
        "__snapshot_packages": [
            "lvm2",
            "util-linux"
        ],
        "__snapshot_python": "/usr/libexec/platform-python"
    },
    "ansible_included_var_files": [
        "/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_8.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_8.yml"
}
ok: [managed-node3] => (item=CentOS_8.yml) => {
    "ansible_facts": {
        "__snapshot_packages": [
            "lvm2",
            "util-linux"
        ],
        "__snapshot_python": "/usr/libexec/platform-python"
    },
    "ansible_included_var_files": [
        "/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_8.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_8.yml"
}

TASK [fedora.linux_system_roles.snapshot : Ensure required packages are installed] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6
Saturday 29 March 2025  18:26:28 -0400 (0:00:00.060)       0:01:41.688 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: lvm2 util-linux
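
The package step above is a no-op here because lvm2 and util-linux are already present. A minimal standalone equivalent of what the role ensures (package names taken from the lsrpackages line above):

- name: Ensure required packages are installed
  package:
    name:
      - lvm2
      - util-linux
    state: present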

TASK [fedora.linux_system_roles.snapshot : Run snapshot module remove] *********
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:14
Saturday 29 March 2025  18:26:31 -0400 (0:00:02.891)       0:01:44.580 ******** 
ok: [managed-node3] => {
    "changed": false,
    "errors": "",
    "message": "",
    "return_code": 0
}

TASK [fedora.linux_system_roles.snapshot : Print out response] *****************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:40
Saturday 29 March 2025  18:26:32 -0400 (0:00:01.243)       0:01:45.823 ******** 
ok: [managed-node3] => {
    "snapshot_cmd": {
        "changed": false,
        "errors": "",
        "failed": false,
        "message": "",
        "msg": "",
        "return_code": 0
    }
}

TASK [fedora.linux_system_roles.snapshot : Set result] *************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:45
Saturday 29 March 2025  18:26:32 -0400 (0:00:00.065)       0:01:45.888 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "snapshot_cmd": {
            "changed": false,
            "errors": "",
            "failed": false,
            "message": "",
            "msg": "",
            "return_code": 0
        }
    },
    "changed": false
}
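
The snapshot_cmd fact set above can be checked directly in a follow-up task; a minimal sketch using only the fields visible in this output:

- name: Assert that the remove/verify pass succeeded
  assert:
    that:
      - snapshot_cmd.return_code == 0
      - not snapshot_cmd.failed
      - snapshot_cmd.errors | length == 0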

TASK [fedora.linux_system_roles.snapshot : Set snapshot_facts to the JSON results] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:49
Saturday 29 March 2025  18:26:32 -0400 (0:00:00.065)       0:01:45.954 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.snapshot : Show errors] ************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:54
Saturday 29 March 2025  18:26:32 -0400 (0:00:00.035)       0:01:45.989 ******** 
skipping: [managed-node3] => {}

TASK [Cleanup] *****************************************************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:204
Saturday 29 March 2025  18:26:32 -0400 (0:00:00.065)       0:01:46.055 ******** 
included: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/tasks/cleanup.yml for managed-node3

TASK [Remove storage volumes] **************************************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/tasks/cleanup.yml:7
Saturday 29 March 2025  18:26:32 -0400 (0:00:00.114)       0:01:46.170 ******** 

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Saturday 29 March 2025  18:26:32 -0400 (0:00:00.075)       0:01:46.245 ******** 
included: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Saturday 29 March 2025  18:26:32 -0400 (0:00:00.061)       0:01:46.306 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Saturday 29 March 2025  18:26:32 -0400 (0:00:00.035)       0:01:46.342 ******** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=CentOS_8.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_8.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_8.yml"
}
ok: [managed-node3] => (item=CentOS_8.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_8.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_8.yml"
}

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Saturday 29 March 2025  18:26:32 -0400 (0:00:00.070)       0:01:46.412 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Saturday 29 March 2025  18:26:33 -0400 (0:00:00.027)       0:01:46.439 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Saturday 29 March 2025  18:26:33 -0400 (0:00:00.027)       0:01:46.467 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Saturday 29 March 2025  18:26:33 -0400 (0:00:00.028)       0:01:46.495 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Saturday 29 March 2025  18:26:33 -0400 (0:00:00.036)       0:01:46.532 ******** 
included: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Saturday 29 March 2025  18:26:33 -0400 (0:00:00.063)       0:01:46.595 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: kmod-kvdo libblockdev libblockdev-crypto libblockdev-dm libblockdev-lvm libblockdev-mdraid libblockdev-swap python3-blivet stratis-cli stratisd vdo xfsprogs

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Saturday 29 March 2025  18:26:36 -0400 (0:00:02.977)       0:01:49.573 ******** 
ok: [managed-node3] => {
    "storage_pools": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc"
            ],
            "name": "test_vg1",
            "state": "absent",
            "volumes": [
                {
                    "name": "lv1",
                    "state": "absent"
                },
                {
                    "name": "lv2",
                    "state": "absent"
                }
            ]
        },
        {
            "disks": [
                "sdd",
                "sde",
                "sdf"
            ],
            "name": "test_vg2",
            "state": "absent",
            "volumes": [
                {
                    "name": "lv3",
                    "state": "absent"
                },
                {
                    "name": "lv4",
                    "state": "absent"
                }
            ]
        },
        {
            "disks": [
                "sdg",
                "sdh",
                "sdi",
                "sdj"
            ],
            "name": "test_vg3",
            "state": "absent",
            "volumes": [
                {
                    "name": "lv5",
                    "state": "absent"
                },
                {
                    "name": "lv6",
                    "state": "absent"
                },
                {
                    "name": "lv7",
                    "state": "absent"
                },
                {
                    "name": "lv8",
                    "state": "absent"
                }
            ]
        }
    ]
}
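
The storage_pools value printed above is what the cleanup pass feeds to the storage role to tear the test volume groups down. Invoking the role directly with the same structure (a sketch, abbreviated to the first pool) would look like:

- name: Remove storage volumes
  vars:
    storage_pools:
      - name: test_vg1
        disks: [sda, sdb, sdc]
        state: absent
        volumes:
          - {name: lv1, state: absent}
          - {name: lv2, state: absent}
      # test_vg2 and test_vg3 follow the same pattern
  include_role:
    name: fedora.linux_system_roles.storage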

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Saturday 29 March 2025  18:26:36 -0400 (0:00:00.056)       0:01:49.629 ******** 
ok: [managed-node3] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}
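
The "VARIABLE IS NOT DEFINED!" message is expected here: the cleanup play only passes storage_pools. Supplying an empty list would silence the debug output; the role already defaults its internal volume list to [] a few tasks earlier, so this is purely cosmetic:

  vars:
    storage_volumes: []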

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Saturday 29 March 2025  18:26:36 -0400 (0:00:00.036)       0:01:49.666 ******** 
ok: [managed-node3] => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [],
    "mounts": [],
    "packages": [],
    "pools": [],
    "volumes": []
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32
Saturday 29 March 2025  18:26:41 -0400 (0:00:05.509)       0:01:55.176 ******** 
included: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Check if the COPR support packages should be installed] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:2
Saturday 29 March 2025  18:26:41 -0400 (0:00:00.043)       0:01:55.220 ******** 

TASK [fedora.linux_system_roles.storage : Make sure COPR support packages are present] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:13
Saturday 29 March 2025  18:26:41 -0400 (0:00:00.024)       0:01:55.244 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable COPRs] ************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:19
Saturday 29 March 2025  18:26:41 -0400 (0:00:00.026)       0:01:55.271 ******** 

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:38
Saturday 29 March 2025  18:26:41 -0400 (0:00:00.023)       0:01:55.295 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: kpartx

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:52
Saturday 29 March 2025  18:26:44 -0400 (0:00:02.859)       0:01:58.154 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "services": {
            "NetworkManager-dispatcher.service": {
                "name": "NetworkManager-dispatcher.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "NetworkManager-wait-online.service": {
                "name": "NetworkManager-wait-online.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "NetworkManager.service": {
                "name": "NetworkManager.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "auditd.service": {
                "name": "auditd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "auth-rpcgss-module.service": {
                "name": "auth-rpcgss-module.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "autovt@.service": {
                "name": "autovt@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "enabled"
            },
            "blivet.service": {
                "name": "blivet.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "blk-availability.service": {
                "name": "blk-availability.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "chrony-dnssrv@.service": {
                "name": "chrony-dnssrv@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "chrony-wait.service": {
                "name": "chrony-wait.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "chronyd.service": {
                "name": "chronyd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "cloud-config.service": {
                "name": "cloud-config.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-final.service": {
                "name": "cloud-final.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-init-hotplugd.service": {
                "name": "cloud-init-hotplugd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "cloud-init-local.service": {
                "name": "cloud-init-local.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-init.service": {
                "name": "cloud-init.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "console-getty.service": {
                "name": "console-getty.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "container-getty@.service": {
                "name": "container-getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "cpupower.service": {
                "name": "cpupower.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "crond.service": {
                "name": "crond.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "dbus-org.freedesktop.hostname1.service": {
                "name": "dbus-org.freedesktop.hostname1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "dbus-org.freedesktop.locale1.service": {
                "name": "dbus-org.freedesktop.locale1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "dbus-org.freedesktop.login1.service": {
                "name": "dbus-org.freedesktop.login1.service",
                "source": "systemd",
                "state": "active",
                "status": "static"
            },
            "dbus-org.freedesktop.nm-dispatcher.service": {
                "name": "dbus-org.freedesktop.nm-dispatcher.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "dbus-org.freedesktop.portable1.service": {
                "name": "dbus-org.freedesktop.portable1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "dbus-org.freedesktop.timedate1.service": {
                "name": "dbus-org.freedesktop.timedate1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "dbus.service": {
                "name": "dbus.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "debug-shell.service": {
                "name": "debug-shell.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "dm-event.service": {
                "name": "dm-event.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "dnf-makecache.service": {
                "name": "dnf-makecache.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dnf-system-upgrade-cleanup.service": {
                "name": "dnf-system-upgrade-cleanup.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "dnf-system-upgrade.service": {
                "name": "dnf-system-upgrade.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "dracut-cmdline.service": {
                "name": "dracut-cmdline.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-initqueue.service": {
                "name": "dracut-initqueue.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-mount.service": {
                "name": "dracut-mount.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-mount.service": {
                "name": "dracut-pre-mount.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-pivot.service": {
                "name": "dracut-pre-pivot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-trigger.service": {
                "name": "dracut-pre-trigger.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-udev.service": {
                "name": "dracut-pre-udev.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-shutdown-onfailure.service": {
                "name": "dracut-shutdown-onfailure.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-shutdown.service": {
                "name": "dracut-shutdown.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "ebtables.service": {
                "name": "ebtables.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "emergency.service": {
                "name": "emergency.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "firewalld.service": {
                "name": "firewalld.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "fstrim.service": {
                "name": "fstrim.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "getty@.service": {
                "name": "getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "enabled"
            },
            "getty@tty1.service": {
                "name": "getty@tty1.service",
                "source": "systemd",
                "state": "running",
                "status": "unknown"
            },
            "grub-boot-indeterminate.service": {
                "name": "grub-boot-indeterminate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "gssproxy.service": {
                "name": "gssproxy.service",
                "source": "systemd",
                "state": "running",
                "status": "disabled"
            },
            "halt-local.service": {
                "name": "halt-local.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "import-state.service": {
                "name": "import-state.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "initrd-cleanup.service": {
                "name": "initrd-cleanup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-parse-etc.service": {
                "name": "initrd-parse-etc.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-switch-root.service": {
                "name": "initrd-switch-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-udevadm-cleanup-db.service": {
                "name": "initrd-udevadm-cleanup-db.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "iprdump.service": {
                "name": "iprdump.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "iprinit.service": {
                "name": "iprinit.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "iprupdate.service": {
                "name": "iprupdate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "irqbalance.service": {
                "name": "irqbalance.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "kdump.service": {
                "name": "kdump.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "kmod-static-nodes.service": {
                "name": "kmod-static-nodes.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "kvm_stat.service": {
                "name": "kvm_stat.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "ldconfig.service": {
                "name": "ldconfig.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "loadmodules.service": {
                "name": "loadmodules.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "lvm2-lvmpolld.service": {
                "name": "lvm2-lvmpolld.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "lvm2-monitor.service": {
                "name": "lvm2-monitor.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "lvm2-pvscan@.service": {
                "name": "lvm2-pvscan@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "lvm2-pvscan@8:0.service": {
                "name": "lvm2-pvscan@8:0.service",
                "source": "systemd",
                "state": "stopped",
                "status": "unknown"
            },
            "lvm2-pvscan@8:112.service": {
                "name": "lvm2-pvscan@8:112.service",
                "source": "systemd",
                "state": "stopped",
                "status": "unknown"
            },
            "lvm2-pvscan@8:128.service": {
                "name": "lvm2-pvscan@8:128.service",
                "source": "systemd",
                "state": "stopped",
                "status": "unknown"
            },
            "lvm2-pvscan@8:144.service": {
                "name": "lvm2-pvscan@8:144.service",
                "source": "systemd",
                "state": "stopped",
                "status": "unknown"
            },
            "lvm2-pvscan@8:16.service": {
                "name": "lvm2-pvscan@8:16.service",
                "source": "systemd",
                "state": "stopped",
                "status": "unknown"
            },
            "lvm2-pvscan@8:32.service": {
                "name": "lvm2-pvscan@8:32.service",
                "source": "systemd",
                "state": "stopped",
                "status": "unknown"
            },
            "lvm2-pvscan@8:48.service": {
                "name": "lvm2-pvscan@8:48.service",
                "source": "systemd",
                "state": "stopped",
                "status": "unknown"
            },
            "lvm2-pvscan@8:64.service": {
                "name": "lvm2-pvscan@8:64.service",
                "source": "systemd",
                "state": "stopped",
                "status": "unknown"
            },
            "lvm2-pvscan@8:80.service": {
                "name": "lvm2-pvscan@8:80.service",
                "source": "systemd",
                "state": "stopped",
                "status": "unknown"
            },
            "lvm2-pvscan@8:96.service": {
                "name": "lvm2-pvscan@8:96.service",
                "source": "systemd",
                "state": "stopped",
                "status": "unknown"
            },
            "man-db-cache-update.service": {
                "name": "man-db-cache-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "man-db-restart-cache-update.service": {
                "name": "man-db-restart-cache-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "mdadm-grow-continue@.service": {
                "name": "mdadm-grow-continue@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "mdadm-last-resort@.service": {
                "name": "mdadm-last-resort@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "mdcheck_continue.service": {
                "name": "mdcheck_continue.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "mdcheck_start.service": {
                "name": "mdcheck_start.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "mdmon@.service": {
                "name": "mdmon@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "mdmonitor-oneshot.service": {
                "name": "mdmonitor-oneshot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "mdmonitor.service": {
                "name": "mdmonitor.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "messagebus.service": {
                "name": "messagebus.service",
                "source": "systemd",
                "state": "active",
                "status": "static"
            },
            "microcode.service": {
                "name": "microcode.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "multipathd.service": {
                "name": "multipathd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "ndctl-monitor.service": {
                "name": "ndctl-monitor.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nfs-blkmap.service": {
                "name": "nfs-blkmap.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nfs-convert.service": {
                "name": "nfs-convert.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nfs-idmapd.service": {
                "name": "nfs-idmapd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfs-mountd.service": {
                "name": "nfs-mountd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfs-server.service": {
                "name": "nfs-server.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "nfs-utils.service": {
                "name": "nfs-utils.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfsdcld.service": {
                "name": "nfsdcld.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nftables.service": {
                "name": "nftables.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nis-domainname.service": {
                "name": "nis-domainname.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "oddjobd.service": {
                "name": "oddjobd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "plymouth-halt.service": {
                "name": "plymouth-halt.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "plymouth-kexec.service": {
                "name": "plymouth-kexec.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "plymouth-poweroff.service": {
                "name": "plymouth-poweroff.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "plymouth-quit-wait.service": {
                "name": "plymouth-quit-wait.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "plymouth-quit.service": {
                "name": "plymouth-quit.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "plymouth-read-write.service": {
                "name": "plymouth-read-write.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "plymouth-reboot.service": {
                "name": "plymouth-reboot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "plymouth-start.service": {
                "name": "plymouth-start.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "plymouth-switch-root-initramfs.service": {
                "name": "plymouth-switch-root-initramfs.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "plymouth-switch-root.service": {
                "name": "plymouth-switch-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "polkit.service": {
                "name": "polkit.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "qemu-guest-agent.service": {
                "name": "qemu-guest-agent.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "quotaon.service": {
                "name": "quotaon.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "rc-local.service": {
                "name": "rc-local.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rdisc.service": {
                "name": "rdisc.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "rescue.service": {
                "name": "rescue.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "restraintd.service": {
                "name": "restraintd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rngd.service": {
                "name": "rngd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rpc-gssd.service": {
                "name": "rpc-gssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-statd-notify.service": {
                "name": "rpc-statd-notify.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-statd.service": {
                "name": "rpc-statd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpcbind.service": {
                "name": "rpcbind.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rsyslog.service": {
                "name": "rsyslog.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "selinux-autorelabel-mark.service": {
                "name": "selinux-autorelabel-mark.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "selinux-autorelabel.service": {
                "name": "selinux-autorelabel.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "serial-getty@.service": {
                "name": "serial-getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "disabled"
            },
            "sshd-keygen@.service": {
                "name": "sshd-keygen@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "disabled"
            },
            "sshd-keygen@ecdsa.service": {
                "name": "sshd-keygen@ecdsa.service",
                "source": "systemd",
                "state": "stopped",
                "status": "unknown"
            },
            "sshd-keygen@ed25519.service": {
                "name": "sshd-keygen@ed25519.service",
                "source": "systemd",
                "state": "stopped",
                "status": "unknown"
            },
            "sshd-keygen@rsa.service": {
                "name": "sshd-keygen@rsa.service",
                "source": "systemd",
                "state": "stopped",
                "status": "unknown"
            },
            "sshd.service": {
                "name": "sshd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "sshd@.service": {
                "name": "sshd@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "sssd-autofs.service": {
                "name": "sssd-autofs.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-kcm.service": {
                "name": "sssd-kcm.service",
                "source": "systemd",
                "state": "stopped",
                "status": "indirect"
            },
            "sssd-nss.service": {
                "name": "sssd-nss.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-pac.service": {
                "name": "sssd-pac.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-pam.service": {
                "name": "sssd-pam.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-ssh.service": {
                "name": "sssd-ssh.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-sudo.service": {
                "name": "sssd-sudo.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd.service": {
                "name": "sssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "stratis-fstab-setup@.service": {
                "name": "stratis-fstab-setup@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "stratisd-min-postinitrd.service": {
                "name": "stratisd-min-postinitrd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "stratisd.service": {
                "name": "stratisd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "syslog.service": {
                "name": "syslog.service",
                "source": "systemd",
                "state": "active",
                "status": "enabled"
            },
            "system-update-cleanup.service": {
                "name": "system-update-cleanup.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-ask-password-console.service": {
                "name": "systemd-ask-password-console.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-ask-password-plymouth.service": {
                "name": "systemd-ask-password-plymouth.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-ask-password-wall.service": {
                "name": "systemd-ask-password-wall.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-backlight@.service": {
                "name": "systemd-backlight@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-binfmt.service": {
                "name": "systemd-binfmt.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-coredump@.service": {
                "name": "systemd-coredump@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-exit.service": {
                "name": "systemd-exit.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-firstboot.service": {
                "name": "systemd-firstboot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-fsck-root.service": {
                "name": "systemd-fsck-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-fsck@.service": {
                "name": "systemd-fsck@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-halt.service": {
                "name": "systemd-halt.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-hibernate-resume@.service": {
                "name": "systemd-hibernate-resume@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-hibernate.service": {
                "name": "systemd-hibernate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-hostnamed.service": {
                "name": "systemd-hostnamed.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-hwdb-update.service": {
                "name": "systemd-hwdb-update.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hybrid-sleep.service": {
                "name": "systemd-hybrid-sleep.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-initctl.service": {
                "name": "systemd-initctl.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journal-catalog-update.service": {
                "name": "systemd-journal-catalog-update.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journal-flush.service": {
                "name": "systemd-journal-flush.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journald.service": {
                "name": "systemd-journald.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-kexec.service": {
                "name": "systemd-kexec.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-localed.service": {
                "name": "systemd-localed.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-logind.service": {
                "name": "systemd-logind.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-machine-id-commit.service": {
                "name": "systemd-machine-id-commit.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-modules-load.service": {
                "name": "systemd-modules-load.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-portabled.service": {
                "name": "systemd-portabled.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-poweroff.service": {
                "name": "systemd-poweroff.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-pstore.service": {
                "name": "systemd-pstore.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-quotacheck.service": {
                "name": "systemd-quotacheck.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-random-seed.service": {
                "name": "systemd-random-seed.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-reboot.service": {
                "name": "systemd-reboot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-remount-fs.service": {
                "name": "systemd-remount-fs.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-resolved.service": {
                "name": "systemd-resolved.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-rfkill.service": {
                "name": "systemd-rfkill.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-suspend-then-hibernate.service": {
                "name": "systemd-suspend-then-hibernate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-suspend.service": {
                "name": "systemd-suspend.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-sysctl.service": {
                "name": "systemd-sysctl.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-sysusers.service": {
                "name": "systemd-sysusers.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-timedated.service": {
                "name": "systemd-timedated.service",
                "source": "systemd",
                "state": "inactive",
                "status": "masked"
            },
            "systemd-tmpfiles-clean.service": {
                "name": "systemd-tmpfiles-clean.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup-dev.service": {
                "name": "systemd-tmpfiles-setup-dev.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup.service": {
                "name": "systemd-tmpfiles-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udev-settle.service": {
                "name": "systemd-udev-settle.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udev-trigger.service": {
                "name": "systemd-udev-trigger.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udevd.service": {
                "name": "systemd-udevd.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-update-done.service": {
                "name": "systemd-update-done.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-update-utmp-runlevel.service": {
                "name": "systemd-update-utmp-runlevel.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-update-utmp.service": {
                "name": "systemd-update-utmp.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-user-sessions.service": {
                "name": "systemd-user-sessions.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-vconsole-setup.service": {
                "name": "systemd-vconsole-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-volatile-root.service": {
                "name": "systemd-volatile-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "target.service": {
                "name": "target.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "targetclid.service": {
                "name": "targetclid.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "tcsd.service": {
                "name": "tcsd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "teamd@.service": {
                "name": "teamd@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "timedatex.service": {
                "name": "timedatex.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "tuned.service": {
                "name": "tuned.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "unbound-anchor.service": {
                "name": "unbound-anchor.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "user-runtime-dir@.service": {
                "name": "user-runtime-dir@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "user-runtime-dir@0.service": {
                "name": "user-runtime-dir@0.service",
                "source": "systemd",
                "state": "stopped",
                "status": "unknown"
            },
            "user@.service": {
                "name": "user@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "user@0.service": {
                "name": "user@0.service",
                "source": "systemd",
                "state": "running",
                "status": "unknown"
            },
            "vdo-start-by-dev@.service": {
                "name": "vdo-start-by-dev@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "vdo.service": {
                "name": "vdo.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            }
        }
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58
Saturday 29 March 2025  18:26:46 -0400 (0:00:01.646)       0:01:59.801 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}
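
The empty list is expected on this host: no systemd-cryptsetup@*.service units appear in the service facts gathered just above. A filter expression along these lines (a sketch only, not necessarily the role's exact code) would produce the same empty result:

    storage_cryptsetup_services: "{{ ansible_facts.services | dict2items
                                     | selectattr('key', 'match', '^systemd-cryptsetup@')
                                     | map(attribute='key') | list }}"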

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64
Saturday 29 March 2025  18:26:46 -0400 (0:00:00.057)       0:01:59.859 ******** 

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70
Saturday 29 March 2025  18:26:46 -0400 (0:00:00.025)       0:01:59.884 ******** 
changed: [managed-node3] => {
    "actions": [
        {
            "action": "destroy format",
            "device": "/dev/mapper/test_vg3-lv8",
            "fs_type": "xfs"
        },
        {
            "action": "destroy device",
            "device": "/dev/mapper/test_vg3-lv8",
            "fs_type": null
        },
        {
            "action": "destroy format",
            "device": "/dev/mapper/test_vg3-lv7",
            "fs_type": "xfs"
        },
        {
            "action": "destroy device",
            "device": "/dev/mapper/test_vg3-lv7",
            "fs_type": null
        },
        {
            "action": "destroy format",
            "device": "/dev/mapper/test_vg3-lv6",
            "fs_type": "xfs"
        },
        {
            "action": "destroy device",
            "device": "/dev/mapper/test_vg3-lv6",
            "fs_type": null
        },
        {
            "action": "destroy format",
            "device": "/dev/mapper/test_vg3-lv5",
            "fs_type": "xfs"
        },
        {
            "action": "destroy device",
            "device": "/dev/mapper/test_vg3-lv5",
            "fs_type": null
        },
        {
            "action": "destroy device",
            "device": "/dev/test_vg3",
            "fs_type": null
        },
        {
            "action": "destroy format",
            "device": "/dev/sdg",
            "fs_type": "lvmpv"
        },
        {
            "action": "destroy format",
            "device": "/dev/sdj",
            "fs_type": "lvmpv"
        },
        {
            "action": "destroy format",
            "device": "/dev/sdi",
            "fs_type": "lvmpv"
        },
        {
            "action": "destroy format",
            "device": "/dev/sdh",
            "fs_type": "lvmpv"
        },
        {
            "action": "destroy format",
            "device": "/dev/mapper/test_vg2-lv4",
            "fs_type": "xfs"
        },
        {
            "action": "destroy device",
            "device": "/dev/mapper/test_vg2-lv4",
            "fs_type": null
        },
        {
            "action": "destroy format",
            "device": "/dev/mapper/test_vg2-lv3",
            "fs_type": "xfs"
        },
        {
            "action": "destroy device",
            "device": "/dev/mapper/test_vg2-lv3",
            "fs_type": null
        },
        {
            "action": "destroy device",
            "device": "/dev/test_vg2",
            "fs_type": null
        },
        {
            "action": "destroy format",
            "device": "/dev/sdd",
            "fs_type": "lvmpv"
        },
        {
            "action": "destroy format",
            "device": "/dev/sdf",
            "fs_type": "lvmpv"
        },
        {
            "action": "destroy format",
            "device": "/dev/sde",
            "fs_type": "lvmpv"
        },
        {
            "action": "destroy format",
            "device": "/dev/mapper/test_vg1-lv2",
            "fs_type": "xfs"
        },
        {
            "action": "destroy device",
            "device": "/dev/mapper/test_vg1-lv2",
            "fs_type": null
        },
        {
            "action": "destroy format",
            "device": "/dev/mapper/test_vg1-lv1",
            "fs_type": "xfs"
        },
        {
            "action": "destroy device",
            "device": "/dev/mapper/test_vg1-lv1",
            "fs_type": null
        },
        {
            "action": "destroy device",
            "device": "/dev/test_vg1",
            "fs_type": null
        },
        {
            "action": "destroy format",
            "device": "/dev/sdb",
            "fs_type": "lvmpv"
        },
        {
            "action": "destroy format",
            "device": "/dev/sda",
            "fs_type": "lvmpv"
        },
        {
            "action": "destroy format",
            "device": "/dev/sdc",
            "fs_type": "lvmpv"
        }
    ],
    "changed": true,
    "crypts": [],
    "leaves": [
        "/dev/sda",
        "/dev/sdb",
        "/dev/sdk",
        "/dev/sdl",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/sdj",
        "/dev/xvda1"
    ],
    "mounts": [],
    "packages": [
        "xfsprogs"
    ],
    "pools": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "test_vg1",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "absent",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/test_vg1-lv1",
                    "_mount_id": "/dev/mapper/test_vg1-lv1",
                    "_raw_device": "/dev/mapper/test_vg1-lv1",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda",
                        "sdb",
                        "sdc"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": null,
                    "mount_user": null,
                    "name": "lv1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": 482344960,
                    "state": "absent",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                },
                {
                    "_device": "/dev/mapper/test_vg1-lv2",
                    "_mount_id": "/dev/mapper/test_vg1-lv2",
                    "_raw_device": "/dev/mapper/test_vg1-lv2",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda",
                        "sdb",
                        "sdc"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": null,
                    "mount_user": null,
                    "name": "lv2",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": 1606418432,
                    "state": "absent",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        },
        {
            "disks": [
                "sdd",
                "sde",
                "sdf"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "test_vg2",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "absent",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/test_vg2-lv3",
                    "_mount_id": "/dev/mapper/test_vg2-lv3",
                    "_raw_device": "/dev/mapper/test_vg2-lv3",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sdd",
                        "sde",
                        "sdf"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": null,
                    "mount_user": null,
                    "name": "lv3",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": 322961408,
                    "state": "absent",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                },
                {
                    "_device": "/dev/mapper/test_vg2-lv4",
                    "_mount_id": "/dev/mapper/test_vg2-lv4",
                    "_raw_device": "/dev/mapper/test_vg2-lv4",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sdd",
                        "sde",
                        "sdf"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": null,
                    "mount_user": null,
                    "name": "lv4",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": 641728512,
                    "state": "absent",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        },
        {
            "disks": [
                "sdg",
                "sdh",
                "sdi",
                "sdj"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "test_vg3",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "absent",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/test_vg3-lv5",
                    "_mount_id": "/dev/mapper/test_vg3-lv5",
                    "_raw_device": "/dev/mapper/test_vg3-lv5",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sdg",
                        "sdh",
                        "sdi",
                        "sdj"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": null,
                    "mount_user": null,
                    "name": "lv5",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": 1283457024,
                    "state": "absent",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                },
                {
                    "_device": "/dev/mapper/test_vg3-lv6",
                    "_mount_id": "/dev/mapper/test_vg3-lv6",
                    "_raw_device": "/dev/mapper/test_vg3-lv6",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sdg",
                        "sdh",
                        "sdi",
                        "sdj"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": null,
                    "mount_user": null,
                    "name": "lv6",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": 1069547520,
                    "state": "absent",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                },
                {
                    "_device": "/dev/mapper/test_vg3-lv7",
                    "_mount_id": "/dev/mapper/test_vg3-lv7",
                    "_raw_device": "/dev/mapper/test_vg3-lv7",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sdg",
                        "sdh",
                        "sdi",
                        "sdj"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": null,
                    "mount_user": null,
                    "name": "lv7",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": 427819008,
                    "state": "absent",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                },
                {
                    "_device": "/dev/mapper/test_vg3-lv8",
                    "_mount_id": "/dev/mapper/test_vg3-lv8",
                    "_raw_device": "/dev/mapper/test_vg3-lv8",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sdg",
                        "sdh",
                        "sdi",
                        "sdj"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": null,
                    "mount_user": null,
                    "name": "lv8",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": 427819008,
                    "state": "absent",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        }
    ],
    "volumes": []
}
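
The teardown recorded above corresponds to the storage role being handed the three test volume groups with state: absent; the pool and volume parameters echoed back in the result suggest an invocation roughly like the following sketch (names and disks are taken from the output; the exact wording and location of the test task are assumptions):

    - name: Clean up the test volume groups (sketch, not the verbatim test task)
      include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: test_vg1
            disks: [sda, sdb, sdc]
            state: absent
            volumes:
              - {name: lv1, state: absent}
              - {name: lv2, state: absent}
          - name: test_vg2
            disks: [sdd, sde, sdf]
            state: absent
            volumes:
              - {name: lv3, state: absent}
              - {name: lv4, state: absent}
          - name: test_vg3
            disks: [sdg, sdh, sdi, sdj]
            state: absent
            volumes:
              - {name: lv5, state: absent}
              - {name: lv6, state: absent}
              - {name: lv7, state: absent}
              - {name: lv8, state: absent}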

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:85
Saturday 29 March 2025  18:26:56 -0400 (0:00:09.743)       0:02:09.627 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "udevadm",
        "trigger",
        "--subsystem-match=block"
    ],
    "delta": "0:00:00.015573",
    "end": "2025-03-29 18:26:56.544768",
    "rc": 0,
    "start": "2025-03-29 18:26:56.529195"
}
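
This workaround re-triggers kernel uevents for all block devices so that device nodes and symlinks reflect the LVM teardown performed above. Reproducing it by hand would look roughly like the following (udevadm settle is not part of the logged task, only a common follow-up to wait for the triggered events):

    udevadm trigger --subsystem-match=block
    udevadm settle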

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:92
Saturday 29 March 2025  18:26:56 -0400 (0:00:00.513)       0:02:10.141 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1743287000.5551405,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "ab8070345774adad92683e9645714452be7be474",
        "ctime": 1743286961.5000908,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 224396032,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1743286961.499091,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1343,
        "uid": 0,
        "version": "600044070",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:97
Saturday 29 March 2025  18:26:57 -0400 (0:00:00.357)       0:02:10.499 ******** 
ok: [managed-node3] => {
    "backup": "",
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115
Saturday 29 March 2025  18:26:57 -0400 (0:00:00.361)       0:02:10.860 ******** 

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:121
Saturday 29 March 2025  18:26:57 -0400 (0:00:00.059)       0:02:10.919 ******** 
ok: [managed-node3] => {
    "blivet_output": {
        "actions": [
            {
                "action": "destroy format",
                "device": "/dev/mapper/test_vg3-lv8",
                "fs_type": "xfs"
            },
            {
                "action": "destroy device",
                "device": "/dev/mapper/test_vg3-lv8",
                "fs_type": null
            },
            {
                "action": "destroy format",
                "device": "/dev/mapper/test_vg3-lv7",
                "fs_type": "xfs"
            },
            {
                "action": "destroy device",
                "device": "/dev/mapper/test_vg3-lv7",
                "fs_type": null
            },
            {
                "action": "destroy format",
                "device": "/dev/mapper/test_vg3-lv6",
                "fs_type": "xfs"
            },
            {
                "action": "destroy device",
                "device": "/dev/mapper/test_vg3-lv6",
                "fs_type": null
            },
            {
                "action": "destroy format",
                "device": "/dev/mapper/test_vg3-lv5",
                "fs_type": "xfs"
            },
            {
                "action": "destroy device",
                "device": "/dev/mapper/test_vg3-lv5",
                "fs_type": null
            },
            {
                "action": "destroy device",
                "device": "/dev/test_vg3",
                "fs_type": null
            },
            {
                "action": "destroy format",
                "device": "/dev/sdg",
                "fs_type": "lvmpv"
            },
            {
                "action": "destroy format",
                "device": "/dev/sdj",
                "fs_type": "lvmpv"
            },
            {
                "action": "destroy format",
                "device": "/dev/sdi",
                "fs_type": "lvmpv"
            },
            {
                "action": "destroy format",
                "device": "/dev/sdh",
                "fs_type": "lvmpv"
            },
            {
                "action": "destroy format",
                "device": "/dev/mapper/test_vg2-lv4",
                "fs_type": "xfs"
            },
            {
                "action": "destroy device",
                "device": "/dev/mapper/test_vg2-lv4",
                "fs_type": null
            },
            {
                "action": "destroy format",
                "device": "/dev/mapper/test_vg2-lv3",
                "fs_type": "xfs"
            },
            {
                "action": "destroy device",
                "device": "/dev/mapper/test_vg2-lv3",
                "fs_type": null
            },
            {
                "action": "destroy device",
                "device": "/dev/test_vg2",
                "fs_type": null
            },
            {
                "action": "destroy format",
                "device": "/dev/sdd",
                "fs_type": "lvmpv"
            },
            {
                "action": "destroy format",
                "device": "/dev/sdf",
                "fs_type": "lvmpv"
            },
            {
                "action": "destroy format",
                "device": "/dev/sde",
                "fs_type": "lvmpv"
            },
            {
                "action": "destroy format",
                "device": "/dev/mapper/test_vg1-lv2",
                "fs_type": "xfs"
            },
            {
                "action": "destroy device",
                "device": "/dev/mapper/test_vg1-lv2",
                "fs_type": null
            },
            {
                "action": "destroy format",
                "device": "/dev/mapper/test_vg1-lv1",
                "fs_type": "xfs"
            },
            {
                "action": "destroy device",
                "device": "/dev/mapper/test_vg1-lv1",
                "fs_type": null
            },
            {
                "action": "destroy device",
                "device": "/dev/test_vg1",
                "fs_type": null
            },
            {
                "action": "destroy format",
                "device": "/dev/sdb",
                "fs_type": "lvmpv"
            },
            {
                "action": "destroy format",
                "device": "/dev/sda",
                "fs_type": "lvmpv"
            },
            {
                "action": "destroy format",
                "device": "/dev/sdc",
                "fs_type": "lvmpv"
            }
        ],
        "changed": true,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/sda",
            "/dev/sdb",
            "/dev/sdk",
            "/dev/sdl",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/sdj",
            "/dev/xvda1"
        ],
        "mounts": [],
        "packages": [
            "xfsprogs"
        ],
        "pools": [
            {
                "disks": [
                    "sda",
                    "sdb",
                    "sdc"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "test_vg1",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "absent",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/test_vg1-lv1",
                        "_mount_id": "/dev/mapper/test_vg1-lv1",
                        "_raw_device": "/dev/mapper/test_vg1-lv1",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda",
                            "sdb",
                            "sdc"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": null,
                        "mount_user": null,
                        "name": "lv1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": 482344960,
                        "state": "absent",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    },
                    {
                        "_device": "/dev/mapper/test_vg1-lv2",
                        "_mount_id": "/dev/mapper/test_vg1-lv2",
                        "_raw_device": "/dev/mapper/test_vg1-lv2",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda",
                            "sdb",
                            "sdc"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": null,
                        "mount_user": null,
                        "name": "lv2",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": 1606418432,
                        "state": "absent",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            },
            {
                "disks": [
                    "sdd",
                    "sde",
                    "sdf"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "test_vg2",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "absent",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/test_vg2-lv3",
                        "_mount_id": "/dev/mapper/test_vg2-lv3",
                        "_raw_device": "/dev/mapper/test_vg2-lv3",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sdd",
                            "sde",
                            "sdf"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": null,
                        "mount_user": null,
                        "name": "lv3",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": 322961408,
                        "state": "absent",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    },
                    {
                        "_device": "/dev/mapper/test_vg2-lv4",
                        "_mount_id": "/dev/mapper/test_vg2-lv4",
                        "_raw_device": "/dev/mapper/test_vg2-lv4",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sdd",
                            "sde",
                            "sdf"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": null,
                        "mount_user": null,
                        "name": "lv4",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": 641728512,
                        "state": "absent",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            },
            {
                "disks": [
                    "sdg",
                    "sdh",
                    "sdi",
                    "sdj"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "test_vg3",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "absent",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/test_vg3-lv5",
                        "_mount_id": "/dev/mapper/test_vg3-lv5",
                        "_raw_device": "/dev/mapper/test_vg3-lv5",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sdg",
                            "sdh",
                            "sdi",
                            "sdj"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": null,
                        "mount_user": null,
                        "name": "lv5",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": 1283457024,
                        "state": "absent",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    },
                    {
                        "_device": "/dev/mapper/test_vg3-lv6",
                        "_mount_id": "/dev/mapper/test_vg3-lv6",
                        "_raw_device": "/dev/mapper/test_vg3-lv6",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sdg",
                            "sdh",
                            "sdi",
                            "sdj"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": null,
                        "mount_user": null,
                        "name": "lv6",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": 1069547520,
                        "state": "absent",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    },
                    {
                        "_device": "/dev/mapper/test_vg3-lv7",
                        "_mount_id": "/dev/mapper/test_vg3-lv7",
                        "_raw_device": "/dev/mapper/test_vg3-lv7",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sdg",
                            "sdh",
                            "sdi",
                            "sdj"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": null,
                        "mount_user": null,
                        "name": "lv7",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": 427819008,
                        "state": "absent",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    },
                    {
                        "_device": "/dev/mapper/test_vg3-lv8",
                        "_mount_id": "/dev/mapper/test_vg3-lv8",
                        "_raw_device": "/dev/mapper/test_vg3-lv8",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sdg",
                            "sdh",
                            "sdi",
                            "sdj"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": null,
                        "mount_user": null,
                        "name": "lv8",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": 427819008,
                        "state": "absent",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130
Saturday 29 March 2025  18:26:57 -0400 (0:00:00.042)       0:02:10.962 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda",
                    "sdb",
                    "sdc"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "test_vg1",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "absent",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/test_vg1-lv1",
                        "_mount_id": "/dev/mapper/test_vg1-lv1",
                        "_raw_device": "/dev/mapper/test_vg1-lv1",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda",
                            "sdb",
                            "sdc"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": null,
                        "mount_user": null,
                        "name": "lv1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": 482344960,
                        "state": "absent",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    },
                    {
                        "_device": "/dev/mapper/test_vg1-lv2",
                        "_mount_id": "/dev/mapper/test_vg1-lv2",
                        "_raw_device": "/dev/mapper/test_vg1-lv2",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda",
                            "sdb",
                            "sdc"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": null,
                        "mount_user": null,
                        "name": "lv2",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": 1606418432,
                        "state": "absent",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            },
            {
                "disks": [
                    "sdd",
                    "sde",
                    "sdf"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "test_vg2",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "absent",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/test_vg2-lv3",
                        "_mount_id": "/dev/mapper/test_vg2-lv3",
                        "_raw_device": "/dev/mapper/test_vg2-lv3",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sdd",
                            "sde",
                            "sdf"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": null,
                        "mount_user": null,
                        "name": "lv3",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": 322961408,
                        "state": "absent",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    },
                    {
                        "_device": "/dev/mapper/test_vg2-lv4",
                        "_mount_id": "/dev/mapper/test_vg2-lv4",
                        "_raw_device": "/dev/mapper/test_vg2-lv4",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sdd",
                            "sde",
                            "sdf"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": null,
                        "mount_user": null,
                        "name": "lv4",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": 641728512,
                        "state": "absent",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            },
            {
                "disks": [
                    "sdg",
                    "sdh",
                    "sdi",
                    "sdj"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "test_vg3",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "absent",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/test_vg3-lv5",
                        "_mount_id": "/dev/mapper/test_vg3-lv5",
                        "_raw_device": "/dev/mapper/test_vg3-lv5",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sdg",
                            "sdh",
                            "sdi",
                            "sdj"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": null,
                        "mount_user": null,
                        "name": "lv5",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": 1283457024,
                        "state": "absent",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    },
                    {
                        "_device": "/dev/mapper/test_vg3-lv6",
                        "_mount_id": "/dev/mapper/test_vg3-lv6",
                        "_raw_device": "/dev/mapper/test_vg3-lv6",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sdg",
                            "sdh",
                            "sdi",
                            "sdj"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": null,
                        "mount_user": null,
                        "name": "lv6",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": 1069547520,
                        "state": "absent",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    },
                    {
                        "_device": "/dev/mapper/test_vg3-lv7",
                        "_mount_id": "/dev/mapper/test_vg3-lv7",
                        "_raw_device": "/dev/mapper/test_vg3-lv7",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sdg",
                            "sdh",
                            "sdi",
                            "sdj"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": null,
                        "mount_user": null,
                        "name": "lv7",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": 427819008,
                        "state": "absent",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    },
                    {
                        "_device": "/dev/mapper/test_vg3-lv8",
                        "_mount_id": "/dev/mapper/test_vg3-lv8",
                        "_raw_device": "/dev/mapper/test_vg3-lv8",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sdg",
                            "sdh",
                            "sdi",
                            "sdj"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": null,
                        "mount_user": null,
                        "name": "lv8",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": 427819008,
                        "state": "absent",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:134
Saturday 29 March 2025  18:26:57 -0400 (0:00:00.056)       0:02:11.019 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:150
Saturday 29 March 2025  18:26:57 -0400 (0:00:00.030)       0:02:11.049 ******** 

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:161
Saturday 29 March 2025  18:26:57 -0400 (0:00:00.023)       0:02:11.073 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:166
Saturday 29 March 2025  18:26:57 -0400 (0:00:00.026)       0:02:11.100 ******** 

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177
Saturday 29 March 2025  18:26:57 -0400 (0:00:00.024)       0:02:11.124 ******** 

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:189
Saturday 29 March 2025  18:26:57 -0400 (0:00:00.027)       0:02:11.151 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:197
Saturday 29 March 2025  18:26:57 -0400 (0:00:00.025)       0:02:11.177 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1743286813.1608264,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1716968941.893,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 135,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1716968586.525,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "1157759751",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:202
Saturday 29 March 2025  18:26:58 -0400 (0:00:00.371)       0:02:11.549 ******** 

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:224
Saturday 29 March 2025  18:26:58 -0400 (0:00:00.043)       0:02:11.593 ******** 
ok: [managed-node3]

TASK [Save unused_disk_return before verify] ***********************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/tasks/cleanup.yml:30
Saturday 29 March 2025  18:26:59 -0400 (0:00:00.865)       0:02:12.458 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "unused_disks_before": [
            "sda",
            "sdb",
            "sdc",
            "sdd",
            "sde",
            "sdf",
            "sdg",
            "sdh",
            "sdi",
            "sdj"
        ]
    },
    "changed": false
}

TASK [Verify that pools/volumes used in test are removed] **********************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/tasks/cleanup.yml:34
Saturday 29 March 2025  18:26:59 -0400 (0:00:00.052)       0:02:12.511 ******** 
included: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml for managed-node3

TASK [Check if system is ostree] ***********************************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:5
Saturday 29 March 2025  18:26:59 -0400 (0:00:00.059)       0:02:12.570 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set flag to indicate system is ostree] ***********************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:10
Saturday 29 March 2025  18:26:59 -0400 (0:00:00.034)       0:02:12.605 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Ensure test packages] ****************************************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:14
Saturday 29 March 2025  18:26:59 -0400 (0:00:00.033)       0:02:12.638 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: util-linux

TASK [Find unused disks in the system] *****************************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:23
Saturday 29 March 2025  18:27:02 -0400 (0:00:02.935)       0:02:15.574 ******** 
ok: [managed-node3] => {
    "changed": false,
    "disks": [
        "sda",
        "sdb",
        "sdc",
        "sdd",
        "sde",
        "sdf",
        "sdg",
        "sdh",
        "sdi",
        "sdj"
    ],
    "info": [
        "Line: NAME=\"/dev/sda\" TYPE=\"disk\" SIZE=\"1073741824\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdb\" TYPE=\"disk\" SIZE=\"1073741824\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdc\" TYPE=\"disk\" SIZE=\"1073741824\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdd\" TYPE=\"disk\" SIZE=\"1073741824\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sde\" TYPE=\"disk\" SIZE=\"1073741824\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdf\" TYPE=\"disk\" SIZE=\"1073741824\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdg\" TYPE=\"disk\" SIZE=\"1073741824\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdh\" TYPE=\"disk\" SIZE=\"1073741824\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdi\" TYPE=\"disk\" SIZE=\"1073741824\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdj\" TYPE=\"disk\" SIZE=\"1073741824\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdk\" TYPE=\"disk\" SIZE=\"1073741824\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdl\" TYPE=\"disk\" SIZE=\"1073741824\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/xvda\" TYPE=\"disk\" SIZE=\"268435456000\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/xvda1\" TYPE=\"part\" SIZE=\"268434390528\" FSTYPE=\"xfs\" LOG-SEC=\"512\"",
        "Line type [part] is not disk: NAME=\"/dev/xvda1\" TYPE=\"part\" SIZE=\"268434390528\" FSTYPE=\"xfs\" LOG-SEC=\"512\"",
        "filename [xvda1] is a partition",
        "Disk [/dev/xvda] attrs [{'type': 'disk', 'size': '268435456000', 'fstype': '', 'ssize': '512'}] has partitions"
    ]
}

TASK [Set unused_disks if necessary] *******************************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:31
Saturday 29 March 2025  18:27:02 -0400 (0:00:00.465)       0:02:16.039 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "unused_disks": [
            "sda",
            "sdb",
            "sdc",
            "sdd",
            "sde",
            "sdf",
            "sdg",
            "sdh",
            "sdi",
            "sdj"
        ]
    },
    "changed": false
}

TASK [Print unused disks] ******************************************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:36
Saturday 29 March 2025  18:27:02 -0400 (0:00:00.050)       0:02:16.089 ******** 
ok: [managed-node3] => {
    "unused_disks": [
        "sda",
        "sdb",
        "sdc",
        "sdd",
        "sde",
        "sdf",
        "sdg",
        "sdh",
        "sdi",
        "sdj"
    ]
}

TASK [Print info from find_unused_disk] ****************************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:44
Saturday 29 March 2025  18:27:02 -0400 (0:00:00.036)       0:02:16.126 ******** 
skipping: [managed-node3] => {}

TASK [Show disk information] ***************************************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:49
Saturday 29 March 2025  18:27:02 -0400 (0:00:00.026)       0:02:16.153 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Exit playbook when there's not enough unused disks in the system] ********
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:58
Saturday 29 March 2025  18:27:02 -0400 (0:00:00.024)       0:02:16.178 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Debug why list of unused disks has changed] ******************************
task path: /tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/tasks/cleanup.yml:40
Saturday 29 March 2025  18:27:02 -0400 (0:00:00.032)       0:02:16.211 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}
META: ran handlers
META: ran handlers

PLAY RECAP *********************************************************************
managed-node3              : ok=172  changed=12   unreachable=0    failed=0    skipped=121  rescued=0    ignored=0   

Saturday 29 March 2025  18:27:02 -0400 (0:00:00.030)       0:02:16.242 ******** 
=============================================================================== 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 9.74s
/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 8.41s
/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 
fedora.linux_system_roles.storage : Get required packages --------------- 5.51s
/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 
fedora.linux_system_roles.snapshot : Run snapshot module snapshot ------- 4.02s
/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:14 
fedora.linux_system_roles.storage : Get required packages --------------- 3.94s
/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 
fedora.linux_system_roles.snapshot : Ensure required packages are installed --- 3.26s
/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6 
fedora.linux_system_roles.storage : Make sure blivet is available ------- 3.06s
/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 
fedora.linux_system_roles.snapshot : Ensure required packages are installed --- 3.01s
/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6 
fedora.linux_system_roles.storage : Make sure blivet is available ------- 2.98s
/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 
fedora.linux_system_roles.snapshot : Ensure required packages are installed --- 2.97s
/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6 
Ensure test packages ---------------------------------------------------- 2.96s
/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:14 
Ensure test packages ---------------------------------------------------- 2.94s
/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:14 
fedora.linux_system_roles.storage : Make sure required packages are installed --- 2.93s
/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:38 
fedora.linux_system_roles.snapshot : Ensure required packages are installed --- 2.92s
/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6 
fedora.linux_system_roles.snapshot : Run snapshot module remove --------- 2.91s
/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:14 
fedora.linux_system_roles.storage : Make sure required packages are installed --- 2.89s
/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:38 
fedora.linux_system_roles.snapshot : Ensure required packages are installed --- 2.89s
/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6 
fedora.linux_system_roles.snapshot : Ensure required packages are installed --- 2.89s
/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6 
fedora.linux_system_roles.snapshot : Ensure required packages are installed --- 2.88s
/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6 
fedora.linux_system_roles.snapshot : Ensure required packages are installed --- 2.87s
/tmp/collections-0OW/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6