ansible-playbook [core 2.17.7]
  config file = None
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/local/lib/python3.12/site-packages/ansible
  ansible collection location = /tmp/collections-IyT
  executable location = /usr/local/bin/ansible-playbook
  python version = 3.12.8 (main, Dec  3 2024, 00:00:00) [GCC 14.2.1 20241104 (Red Hat 14.2.1-6)] (/usr/bin/python3.12)
  jinja version = 3.1.4
  libyaml = True
No config file found; using defaults
running playbook inside collection fedora.linux_system_roles
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.profile_tasks to ansible.posix.profile_tasks
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: tests_quadlet_demo.yml ***********************************************
2 plays in /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml

PLAY [all] *********************************************************************

TASK [Include vault variables] *************************************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:5
Monday 06 January 2025  13:42:29 -0500 (0:00:00.012)       0:00:00.012 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_test_password": {
            "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n35383939616163653333633431363463313831383037386236646138333162396161356130303461\n3932623930643263313563336163316337643562333936360a363538636631313039343233383732\n38666530383538656639363465313230343533386130303833336434303438333161656262346562\n3362626538613031640a663330613638366132356534363534353239616666653466353961323533\n6565\n"
        },
        "mysql_container_root_password": {
            "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n61333932373230333539663035366431326163363166363036323963623131363530326231303634\n6635326161643165363366323062333334363730376631660a393566366139353861656364656661\n38653463363837336639363032646433666361646535366137303464623261313663643336306465\n6264663730656337310a343962353137386238383064646533366433333437303566656433386233\n34343235326665646661623131643335313236313131353661386338343366316261643634653633\n3832313034366536616531323963333234326461353130303532\n"
        }
    },
    "ansible_included_var_files": [
        "/tmp/podman-Y0F/tests/vars/vault-variables.yml"
    ],
    "changed": false
}

PLAY [Deploy the quadlet demo app] *********************************************

TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:9
Monday 06 January 2025  13:42:29 -0500 (0:00:00.039)       0:00:00.051 ******** 
[WARNING]: Platform linux on host managed-node3 is using the discovered Python
interpreter at /usr/bin/python3.12, but future installation of another Python
interpreter could change the meaning of that path. See
https://docs.ansible.com/ansible-
core/2.17/reference_appendices/interpreter_discovery.html for more information.
ok: [managed-node3]

TASK [Test is only supported on x86_64] ****************************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:38
Monday 06 January 2025  13:42:30 -0500 (0:00:01.553)       0:00:01.605 ******** 
skipping: [managed-node3] => {
    "false_condition": "ansible_facts[\"architecture\"] != \"x86_64\""
}

TASK [End test] ****************************************************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:45
Monday 06 January 2025  13:42:30 -0500 (0:00:00.015)       0:00:01.620 ******** 
META: end_play conditional evaluated to False, continuing play
skipping: [managed-node3] => {
    "skip_reason": "end_play conditional evaluated to False, continuing play"
}

MSG:

end_play

TASK [Generate certificates] ***************************************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:51
Monday 06 January 2025  13:42:30 -0500 (0:00:00.008)       0:00:01.629 ******** 
included: fedora.linux_system_roles.certificate for managed-node3

TASK [fedora.linux_system_roles.certificate : Set version specific variables] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:2
Monday 06 January 2025  13:42:30 -0500 (0:00:00.038)       0:00:01.667 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.certificate : Ensure ansible_facts used by role] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:2
Monday 06 January 2025  13:42:30 -0500 (0:00:00.024)       0:00:01.691 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__certificate_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.certificate : Check if system is ostree] *******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:10
Monday 06 January 2025  13:42:31 -0500 (0:00:00.039)       0:00:01.731 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [fedora.linux_system_roles.certificate : Set flag to indicate system is ostree] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:15
Monday 06 January 2025  13:42:31 -0500 (0:00:00.510)       0:00:02.241 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__certificate_is_ostree": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.certificate : Set platform/version specific variables] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:19
Monday 06 January 2025  13:42:31 -0500 (0:00:00.045)       0:00:02.287 ******** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "__certificate_certmonger_packages": [
            "certmonger",
            "python3-packaging"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/certificate/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}
ok: [managed-node3] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "__certificate_certmonger_packages": [
            "certmonger",
            "python3-packaging"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/certificate/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}

TASK [fedora.linux_system_roles.certificate : Ensure certificate role dependencies are installed] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:5
Monday 06 January 2025  13:42:31 -0500 (0:00:00.077)       0:00:02.364 ******** 
changed: [managed-node3] => {
    "changed": true,
    "rc": 0,
    "results": [
        "Installed: python3-cffi-1.16.0-7.el10.x86_64",
        "Installed: python3-pyasn1-0.6.1-1.el10.noarch",
        "Installed: python3-cryptography-43.0.0-4.el10.x86_64",
        "Installed: python3-ply-3.11-25.el10.noarch",
        "Installed: python3-pycparser-2.20-16.el10.noarch"
    ]
}

TASK [fedora.linux_system_roles.certificate : Ensure provider packages are installed] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:23
Monday 06 January 2025  13:42:34 -0500 (0:00:02.691)       0:00:05.055 ******** 
changed: [managed-node3] => (item=certmonger) => {
    "__certificate_provider": "certmonger",
    "ansible_loop_var": "__certificate_provider",
    "changed": true,
    "rc": 0,
    "results": [
        "Installed: nss-sysinit-3.101.0-13.el10.x86_64",
        "Installed: nss-util-3.101.0-13.el10.x86_64",
        "Installed: python3-packaging-23.2-6.el10.noarch",
        "Installed: certmonger-0.79.20-3.el10.x86_64",
        "Installed: dbus-tools-1:1.14.10-5.el10.x86_64",
        "Installed: nspr-4.35.0-34.el10.x86_64",
        "Installed: nss-3.101.0-13.el10.x86_64",
        "Installed: nss-softokn-3.101.0-13.el10.x86_64",
        "Installed: nss-softokn-freebl-3.101.0-13.el10.x86_64"
    ]
}

TASK [fedora.linux_system_roles.certificate : Ensure pre-scripts hooks directory exists] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:35
Monday 06 January 2025  13:42:37 -0500 (0:00:02.899)       0:00:07.954 ******** 
changed: [managed-node3] => (item=certmonger) => {
    "__certificate_provider": "certmonger",
    "ansible_loop_var": "__certificate_provider",
    "changed": true,
    "gid": 0,
    "group": "root",
    "mode": "0700",
    "owner": "root",
    "path": "/etc/certmonger//pre-scripts",
    "secontext": "unconfined_u:object_r:etc_t:s0",
    "size": 6,
    "state": "directory",
    "uid": 0
}

TASK [fedora.linux_system_roles.certificate : Ensure post-scripts hooks directory exists] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:61
Monday 06 January 2025  13:42:37 -0500 (0:00:00.533)       0:00:08.488 ******** 
changed: [managed-node3] => (item=certmonger) => {
    "__certificate_provider": "certmonger",
    "ansible_loop_var": "__certificate_provider",
    "changed": true,
    "gid": 0,
    "group": "root",
    "mode": "0700",
    "owner": "root",
    "path": "/etc/certmonger//post-scripts",
    "secontext": "unconfined_u:object_r:etc_t:s0",
    "size": 6,
    "state": "directory",
    "uid": 0
}

TASK [fedora.linux_system_roles.certificate : Ensure provider service is running] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:90
Monday 06 January 2025  13:42:38 -0500 (0:00:00.384)       0:00:08.872 ******** 
changed: [managed-node3] => (item=certmonger) => {
    "__certificate_provider": "certmonger",
    "ansible_loop_var": "__certificate_provider",
    "changed": true,
    "enabled": true,
    "name": "certmonger",
    "state": "started",
    "status": {
        "AccessSELinuxContext": "system_u:object_r:certmonger_unit_file_t:s0",
        "ActiveEnterTimestampMonotonic": "0",
        "ActiveExitTimestampMonotonic": "0",
        "ActiveState": "inactive",
        "After": "system.slice dbus-broker.service basic.target systemd-journald.socket sysinit.target syslog.target network.target dbus.socket",
        "AllowIsolate": "no",
        "AssertResult": "no",
        "AssertTimestampMonotonic": "0",
        "Before": "shutdown.target",
        "BlockIOAccounting": "no",
        "BlockIOWeight": "[not set]",
        "BusName": "org.fedorahosted.certmonger",
        "CPUAccounting": "yes",
        "CPUAffinityFromNUMA": "no",
        "CPUQuotaPerSecUSec": "infinity",
        "CPUQuotaPeriodUSec": "infinity",
        "CPUSchedulingPolicy": "0",
        "CPUSchedulingPriority": "0",
        "CPUSchedulingResetOnFork": "no",
        "CPUShares": "[not set]",
        "CPUUsageNSec": "[not set]",
        "CPUWeight": "[not set]",
        "CacheDirectoryMode": "0755",
        "CanFreeze": "yes",
        "CanIsolate": "no",
        "CanReload": "no",
        "CanStart": "yes",
        "CanStop": "yes",
        "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
        "CleanResult": "success",
        "CollectMode": "inactive",
        "ConditionResult": "no",
        "ConditionTimestampMonotonic": "0",
        "ConfigurationDirectoryMode": "0755",
        "Conflicts": "shutdown.target",
        "ControlGroupId": "0",
        "ControlPID": "0",
        "CoredumpFilter": "0x33",
        "CoredumpReceive": "no",
        "DefaultDependencies": "yes",
        "DefaultMemoryLow": "0",
        "DefaultMemoryMin": "0",
        "DefaultStartupMemoryLow": "0",
        "Delegate": "no",
        "Description": "Certificate monitoring and PKI enrollment",
        "DevicePolicy": "auto",
        "DynamicUser": "no",
        "EffectiveMemoryHigh": "3698229248",
        "EffectiveMemoryMax": "3698229248",
        "EffectiveTasksMax": "22349",
        "EnvironmentFiles": "/etc/sysconfig/certmonger (ignore_errors=yes)",
        "ExecMainCode": "0",
        "ExecMainExitTimestampMonotonic": "0",
        "ExecMainHandoffTimestampMonotonic": "0",
        "ExecMainPID": "0",
        "ExecMainStartTimestampMonotonic": "0",
        "ExecMainStatus": "0",
        "ExecStart": "{ path=/usr/sbin/certmonger ; argv[]=/usr/sbin/certmonger -S -p /run/certmonger.pid -n $OPTS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecStartEx": "{ path=/usr/sbin/certmonger ; argv[]=/usr/sbin/certmonger -S -p /run/certmonger.pid -n $OPTS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExitType": "main",
        "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "FailureAction": "none",
        "FileDescriptorStoreMax": "0",
        "FileDescriptorStorePreserve": "restart",
        "FinalKillSignal": "9",
        "FragmentPath": "/usr/lib/systemd/system/certmonger.service",
        "FreezerState": "running",
        "GID": "[not set]",
        "GuessMainPID": "yes",
        "IOAccounting": "no",
        "IOReadBytes": "[not set]",
        "IOReadOperations": "[not set]",
        "IOSchedulingClass": "2",
        "IOSchedulingPriority": "4",
        "IOWeight": "[not set]",
        "IOWriteBytes": "[not set]",
        "IOWriteOperations": "[not set]",
        "IPAccounting": "no",
        "IPEgressBytes": "[no data]",
        "IPEgressPackets": "[no data]",
        "IPIngressBytes": "[no data]",
        "IPIngressPackets": "[no data]",
        "Id": "certmonger.service",
        "IgnoreOnIsolate": "no",
        "IgnoreSIGPIPE": "yes",
        "InactiveEnterTimestampMonotonic": "0",
        "InactiveExitTimestampMonotonic": "0",
        "JobRunningTimeoutUSec": "infinity",
        "JobTimeoutAction": "none",
        "JobTimeoutUSec": "infinity",
        "KeyringMode": "private",
        "KillMode": "control-group",
        "KillSignal": "15",
        "LimitAS": "infinity",
        "LimitASSoft": "infinity",
        "LimitCORE": "infinity",
        "LimitCORESoft": "infinity",
        "LimitCPU": "infinity",
        "LimitCPUSoft": "infinity",
        "LimitDATA": "infinity",
        "LimitDATASoft": "infinity",
        "LimitFSIZE": "infinity",
        "LimitFSIZESoft": "infinity",
        "LimitLOCKS": "infinity",
        "LimitLOCKSSoft": "infinity",
        "LimitMEMLOCK": "8388608",
        "LimitMEMLOCKSoft": "8388608",
        "LimitMSGQUEUE": "819200",
        "LimitMSGQUEUESoft": "819200",
        "LimitNICE": "0",
        "LimitNICESoft": "0",
        "LimitNOFILE": "524288",
        "LimitNOFILESoft": "1024",
        "LimitNPROC": "13968",
        "LimitNPROCSoft": "13968",
        "LimitRSS": "infinity",
        "LimitRSSSoft": "infinity",
        "LimitRTPRIO": "0",
        "LimitRTPRIOSoft": "0",
        "LimitRTTIME": "infinity",
        "LimitRTTIMESoft": "infinity",
        "LimitSIGPENDING": "13968",
        "LimitSIGPENDINGSoft": "13968",
        "LimitSTACK": "infinity",
        "LimitSTACKSoft": "8388608",
        "LoadState": "loaded",
        "LockPersonality": "no",
        "LogLevelMax": "-1",
        "LogRateLimitBurst": "0",
        "LogRateLimitIntervalUSec": "0",
        "LogsDirectoryMode": "0755",
        "MainPID": "0",
        "ManagedOOMMemoryPressure": "auto",
        "ManagedOOMMemoryPressureLimit": "0",
        "ManagedOOMPreference": "none",
        "ManagedOOMSwap": "auto",
        "MemoryAccounting": "yes",
        "MemoryAvailable": "3166289920",
        "MemoryCurrent": "[not set]",
        "MemoryDenyWriteExecute": "no",
        "MemoryHigh": "infinity",
        "MemoryKSM": "no",
        "MemoryLimit": "infinity",
        "MemoryLow": "0",
        "MemoryMax": "infinity",
        "MemoryMin": "0",
        "MemoryPeak": "[not set]",
        "MemoryPressureThresholdUSec": "200ms",
        "MemoryPressureWatch": "auto",
        "MemorySwapCurrent": "[not set]",
        "MemorySwapMax": "infinity",
        "MemorySwapPeak": "[not set]",
        "MemoryZSwapCurrent": "[not set]",
        "MemoryZSwapMax": "infinity",
        "MemoryZSwapWriteback": "yes",
        "MountAPIVFS": "no",
        "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "NFileDescriptorStore": "0",
        "NRestarts": "0",
        "NUMAPolicy": "n/a",
        "Names": "certmonger.service",
        "NeedDaemonReload": "no",
        "Nice": "0",
        "NoNewPrivileges": "no",
        "NonBlocking": "no",
        "NotifyAccess": "none",
        "OOMPolicy": "stop",
        "OOMScoreAdjust": "0",
        "OnFailureJobMode": "replace",
        "OnSuccessJobMode": "fail",
        "PIDFile": "/run/certmonger.pid",
        "PartOf": "dbus-broker.service",
        "Perpetual": "no",
        "PrivateDevices": "no",
        "PrivateIPC": "no",
        "PrivateMounts": "no",
        "PrivateNetwork": "no",
        "PrivateTmp": "no",
        "PrivateUsers": "no",
        "ProcSubset": "all",
        "ProtectClock": "no",
        "ProtectControlGroups": "no",
        "ProtectHome": "no",
        "ProtectHostname": "no",
        "ProtectKernelLogs": "no",
        "ProtectKernelModules": "no",
        "ProtectKernelTunables": "no",
        "ProtectProc": "default",
        "ProtectSystem": "no",
        "RefuseManualStart": "no",
        "RefuseManualStop": "no",
        "ReloadResult": "success",
        "ReloadSignal": "1",
        "RemainAfterExit": "no",
        "RemoveIPC": "no",
        "Requires": "sysinit.target system.slice dbus.socket",
        "Restart": "no",
        "RestartKillSignal": "15",
        "RestartMaxDelayUSec": "infinity",
        "RestartMode": "normal",
        "RestartSteps": "0",
        "RestartUSec": "100ms",
        "RestartUSecNext": "100ms",
        "RestrictNamespaces": "no",
        "RestrictRealtime": "no",
        "RestrictSUIDSGID": "no",
        "Result": "success",
        "RootDirectoryStartOnly": "no",
        "RootEphemeral": "no",
        "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "RuntimeDirectoryMode": "0755",
        "RuntimeDirectoryPreserve": "no",
        "RuntimeMaxUSec": "infinity",
        "RuntimeRandomizedExtraUSec": "0",
        "SameProcessGroup": "no",
        "SecureBits": "0",
        "SendSIGHUP": "no",
        "SendSIGKILL": "yes",
        "SetLoginEnvironment": "no",
        "Slice": "system.slice",
        "StandardError": "inherit",
        "StandardInput": "null",
        "StandardOutput": "journal",
        "StartLimitAction": "none",
        "StartLimitBurst": "5",
        "StartLimitIntervalUSec": "10s",
        "StartupBlockIOWeight": "[not set]",
        "StartupCPUShares": "[not set]",
        "StartupCPUWeight": "[not set]",
        "StartupIOWeight": "[not set]",
        "StartupMemoryHigh": "infinity",
        "StartupMemoryLow": "0",
        "StartupMemoryMax": "infinity",
        "StartupMemorySwapMax": "infinity",
        "StartupMemoryZSwapMax": "infinity",
        "StateChangeTimestampMonotonic": "0",
        "StateDirectoryMode": "0755",
        "StatusErrno": "0",
        "StopWhenUnneeded": "no",
        "SubState": "dead",
        "SuccessAction": "none",
        "SurviveFinalKillSignal": "no",
        "SyslogFacility": "3",
        "SyslogLevel": "6",
        "SyslogLevelPrefix": "yes",
        "SyslogPriority": "30",
        "SystemCallErrorNumber": "2147483646",
        "TTYReset": "no",
        "TTYVHangup": "no",
        "TTYVTDisallocate": "no",
        "TasksAccounting": "yes",
        "TasksCurrent": "[not set]",
        "TasksMax": "22349",
        "TimeoutAbortUSec": "1min 30s",
        "TimeoutCleanUSec": "infinity",
        "TimeoutStartFailureMode": "terminate",
        "TimeoutStartUSec": "1min 30s",
        "TimeoutStopFailureMode": "terminate",
        "TimeoutStopUSec": "1min 30s",
        "TimerSlackNSec": "50000",
        "Transient": "no",
        "Type": "dbus",
        "UID": "[not set]",
        "UMask": "0022",
        "UnitFilePreset": "disabled",
        "UnitFileState": "disabled",
        "UtmpMode": "init",
        "WatchdogSignal": "6",
        "WatchdogTimestampMonotonic": "0",
        "WatchdogUSec": "infinity"
    }
}

TASK [fedora.linux_system_roles.certificate : Ensure certificate requests] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:101
Monday 06 January 2025  13:42:39 -0500 (0:00:01.072)       0:00:09.945 ******** 
changed: [managed-node3] => (item={'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}) => {
    "ansible_loop_var": "item",
    "changed": true,
    "item": {
        "ca": "self-sign",
        "dns": [
            "localhost"
        ],
        "name": "quadlet_demo"
    }
}

MSG:

Certificate requested (new).

TASK [fedora.linux_system_roles.certificate : Slurp the contents of the files] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:152
Monday 06 January 2025  13:42:40 -0500 (0:00:00.910)       0:00:10.855 ******** 
ok: [managed-node3] => (item=['cert', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => {
    "ansible_loop_var": "item",
    "changed": false,
    "content": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURnekNDQW11Z0F3SUJBZ0lSQUs0c2RJMVJnay9BaTFMeU1LOXUyMUF3RFFZSktvWklodmNOQVFFTEJRQXcKVURFZ01CNEdBMVVFQXd3WFRHOWpZV3dnVTJsbmJtbHVaeUJCZFhSb2IzSnBkSGt4TERBcUJnTlZCQU1NSTJGbApNbU0zTkRoa0xUVXhPREkwWm1Nd0xUaGlOVEptTWpNd0xXRm1ObVZrWWpSbU1CNFhEVEkxTURFd05qRTROREl6Ck9Wb1hEVEkyTURFd05qRTROREl6T1Zvd0ZERVNNQkFHQTFVRUF4TUpiRzlqWVd4b2IzTjBNSUlCSWpBTkJna3EKaGtpRzl3MEJBUUVGQUFPQ0FROEFNSUlCQ2dLQ0FRRUFybzUvR3ZSSmVid1dGd0RGaXlVUHdOTlBOL1BvMGpTWQphaFBUK3RrMEhDZFJVSy9ZOHkzWkpqM0hld1Q3d0hrcnRDQTF6ODVGMG41b3A0M0RpYnFaK0FaQWRBZDhQL3JsCm9Hdlo5RGZJWExwUlNrY2pQcWJWTG9sK1YvU3NuZjJsU2RuVEtJcStvOFRkZEVRYXUwUGZabTVLWXN3aFp1dlUKMWZLcytmQXFFbUVOOFJldWxkOUl2RlJ0Qmt1bWtVZmRtdlNYYVUvaStQS0pLYy9hL2EzVlpwUTBDcVYvczNYdQp0OXB4RU5pRUIwL2FFV1Fia2RSV3g5MHBnOFQ3UE0rTStSZUR2M2dzQW4wcFgxaGdUckFxd0ZDa2VSQnd1Qnl3CmRtZWFlM3JBcU16a0xkVlJPR3BHTGZ1eGkzTXlmNEU5SzZhK0ZaRGx6N3VQNGd2N2R6Uk4wUUlEQVFBQm80R1QKTUlHUU1Bc0dBMVVkRHdRRUF3SUZvREFVQmdOVkhSRUVEVEFMZ2dsc2IyTmhiR2h2YzNRd0hRWURWUjBsQkJZdwpGQVlJS3dZQkJRVUhBd0VHQ0NzR0FRVUZCd01DTUF3R0ExVWRFd0VCL3dRQ01BQXdIUVlEVlIwT0JCWUVGRHByCnVobXlBNjcwUXdQR1pWT3VaMDlBc2NjV01COEdBMVVkSXdRWU1CYUFGR1F2NFNuMWNxYVhZZFZ6SmZzTlVjYysKTHBWbE1BMEdDU3FHU0liM0RRRUJDd1VBQTRJQkFRQ0tTdmdqbkpEUW5JYzY0a2tRUmVuUnRtVDAweTJ5YlREKwpZenJQU0t6SEhuenhYWTVBeldHdzhYQ1J0WXlldnEzanc3MTg3Q3BITzN4ZFpPcEtudVN4ejhFU29zN09MNjA3CktlM3BNQllYQUgzWlNMZjlIT215SHJIVTJqdERhT2xtM0hHQ25kN2htYXVaQkRFZVlyOUhlaFk4RlZtN3o3a2cKOWN1Si9aN1JxUDEzdVpqRUd3dmlGTWpaa1JzWlJtL1dmNjRpUVk1K0FVMERFYTRHbGFhZ2M1c0ZKTmI5R2hpTwpXV082QkhLWkYyZGpIZDYwblVYMFErQXo4ajE1c29obW5tekJCcTRBZE1qY0VrS2pUbDFMWEdOZ0ExakZFL2xTClFWUzFoalhaY0xXVi9LQ042NjMzY0l3Njd3YkM0UDg5dHYyaTJlKy8wOHViZzMxWE54enIKLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=",
    "encoding": "base64",
    "item": [
        "cert",
        {
            "ca": "self-sign",
            "dns": [
                "localhost"
            ],
            "name": "quadlet_demo"
        }
    ],
    "source": "/etc/pki/tls/certs/quadlet_demo.crt"
}
ok: [managed-node3] => (item=['key', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => {
    "ansible_loop_var": "item",
    "changed": false,
    "content": "LS0tLS1CRUdJTiBQUklWQVRFIEtFWS0tLS0tCk1JSUV2UUlCQURBTkJna3Foa2lHOXcwQkFRRUZBQVNDQktjd2dnU2pBZ0VBQW9JQkFRQ3VqbjhhOUVsNXZCWVgKQU1XTEpRL0EwMDgzOCtqU05KaHFFOVA2MlRRY0oxRlFyOWp6TGRrbVBjZDdCUHZBZVN1MElEWFB6a1hTZm1pbgpqY09KdXBuNEJrQjBCM3cvK3VXZ2E5bjBOOGhjdWxGS1J5TStwdFV1aVg1WDlLeWQvYVZKMmRNb2lyNmp4TjEwClJCcTdROTltYmtwaXpDRm02OVRWOHF6NThDb1NZUTN4RjY2VjMwaThWRzBHUzZhUlI5MmE5SmRwVCtMNDhva3AKejlyOXJkVm1sRFFLcFgremRlNjMybkVRMklRSFQ5b1JaQnVSMUZiSDNTbUR4UHM4ejR6NUY0Ty9lQ3dDZlNsZgpXR0JPc0NyQVVLUjVFSEM0SExCMlo1cDdlc0Nvek9RdDFWRTRha1l0KzdHTGN6Si9nVDBycHI0VmtPWFB1NC9pCkMvdDNORTNSQWdNQkFBRUNnZ0VBQkttVXRvMFkyMUlOU3Y3L3g4Z3RKQXJnM2s4N0JFWllYZGo2dkFRS01zaFEKRVRZcE50R2VMN25OdHJqNjJuakNGSlVacXR2SkhLYTR0K1QrK0pKTjQ0U1RPeVlxQ1BGMEtiVkh4SkNxM3VNeApJYWxGeFRvaWpwUW9sa1BZU240SWFMRVRSVTF6YXg5cWhOeiswOHFyMEdvWXZZdlRScEw5QTl2d0I5c0UzS25FCnJDZXJnQTUyZVpuMlFsczZwZVFUNWZ4b3dsRVBHeUlITmFGUHNkenBFQnZpYmtuV1hvM1BRWGNBT2R3NEJQMXEKbk1GL25SWG4xQ2RBM3BGQ1d5TTkvMGFwTm1MZlgvcVRTM2d5bG9CMWZVZ0QvblozZXdsWjNiZFc5R3pWUWxNQgozeFNsblNGYmF1R0FPM0VxVFc0VWNtbERqUWF0aUxaclhNVWNaVGUxNndLQmdRRGdKTnpjOVZRdms0OXJzdnN3CnVOZkhYSTZOL0FyZzFUd2gyOG9ZZ3Y3bTJEVU1mSHFiV2x4djhqREVTc1d1Nmo1RHlyQ2Nmbi9jN0I0dGZUbEUKanBjSXdkeUhkeTFSVjJaV3ZESWwrOTArMEhiQWdwR0FaaE14UXhUcEpxL3JpbVdnR2RDTStPVFpETVMyaWlTYwowV3ZTRDZaMVE2bFd3b0FpQkw3WlpZamp6d0tCZ1FESFhYa1ltRzZtTTc4L3VaMFhiVzFwaFRWeXNIN0pPUGlzCjBoUGwyRU9aZ3pFVTlRTE1Ub3M2U2pJRStxZ0UxT2djcGk2bklsRXlzN3grdUlGODJ5MHBzQ2FveFN3YTBXbU8KamQ4M3p1ZXdodGptVGxJeVJxZVp6cHFlcFhvVU16UkZkR1FFSGI0OExTVkV1R3RiNWxUb0FNVFhndmp6K2R5VQpPYzN6SmxqOFh3S0JnRzU0UEpKNkp4KzdyamY2WW1yRTd6UkduY3ZFcU9mZXNHQlBudTh0d011dFM1ZjMwY1czClowK1ZJZ3ZjZmttVEVWVUFGTU5NWldCUXp4N0VWUEZCQlpjL1ZKMC9VNHAvZE9UU3BEU3NTWUtuNDU5RnVzaDUKaG9RRTk4MWhUOXJhdnZPRlJZLzFLM1QrSWdXRldENGh5TzRpNEh0eStZYlZIYW1taFJsSVBZdHBBb0dBWU5KUQpnVlZtUjJsU05FeWNza2YrbndTR2xKQ0t3b2Jjbm5IWUlBUUxzSDQ2VkFjNXVLZGNqbzZDQkZFSFVIRmwyS1FNCmZEYjZZRGxTRGVqbFdoSEh1VHRIU3llVzU5OVQrRnVHVU1BM1hVL2YvdG1DcWRBT2pXLzlQNWIxMXVHWFBUUHQKYnVKVEJUNlhudXhCbDN3aE9CdEhmYktwM1RNM29QVUJ4RmlYVnRrQ2dZRUF3U1Q5bHpBRkx1Sjl0QlFwK2hPdQpOQ3F0cWF5NDEwc25EUTh2dHZSb3BsbVo5dDQrc1lpSEIxbFRMVTNDdWo5eDZKeG9qWnI1MGZDWVFFQ29JVTh1CjZ3TDZUZ0dlNVgvaTlhRXc1VzU1SkVITldPSEZGR09JbzNWT2ZzWExiM29DMERtRm5HdHZJcExuc3pLTXpvYkcKNklWaTFjeCt2T3hyZ0s3RGpFQkMrUjA9Ci0tLS0tRU5EIFBSSVZBVEUgS0VZLS0tLS0K",
    "encoding": "base64",
    "item": [
        "key",
        {
            "ca": "self-sign",
            "dns": [
                "localhost"
            ],
            "name": "quadlet_demo"
        }
    ],
    "source": "/etc/pki/tls/private/quadlet_demo.key"
}
ok: [managed-node3] => (item=['ca', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => {
    "ansible_loop_var": "item",
    "changed": false,
    "content": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURnekNDQW11Z0F3SUJBZ0lSQUs0c2RJMVJnay9BaTFMeU1LOXUyMUF3RFFZSktvWklodmNOQVFFTEJRQXcKVURFZ01CNEdBMVVFQXd3WFRHOWpZV3dnVTJsbmJtbHVaeUJCZFhSb2IzSnBkSGt4TERBcUJnTlZCQU1NSTJGbApNbU0zTkRoa0xUVXhPREkwWm1Nd0xUaGlOVEptTWpNd0xXRm1ObVZrWWpSbU1CNFhEVEkxTURFd05qRTROREl6Ck9Wb1hEVEkyTURFd05qRTROREl6T1Zvd0ZERVNNQkFHQTFVRUF4TUpiRzlqWVd4b2IzTjBNSUlCSWpBTkJna3EKaGtpRzl3MEJBUUVGQUFPQ0FROEFNSUlCQ2dLQ0FRRUFybzUvR3ZSSmVid1dGd0RGaXlVUHdOTlBOL1BvMGpTWQphaFBUK3RrMEhDZFJVSy9ZOHkzWkpqM0hld1Q3d0hrcnRDQTF6ODVGMG41b3A0M0RpYnFaK0FaQWRBZDhQL3JsCm9Hdlo5RGZJWExwUlNrY2pQcWJWTG9sK1YvU3NuZjJsU2RuVEtJcStvOFRkZEVRYXUwUGZabTVLWXN3aFp1dlUKMWZLcytmQXFFbUVOOFJldWxkOUl2RlJ0Qmt1bWtVZmRtdlNYYVUvaStQS0pLYy9hL2EzVlpwUTBDcVYvczNYdQp0OXB4RU5pRUIwL2FFV1Fia2RSV3g5MHBnOFQ3UE0rTStSZUR2M2dzQW4wcFgxaGdUckFxd0ZDa2VSQnd1Qnl3CmRtZWFlM3JBcU16a0xkVlJPR3BHTGZ1eGkzTXlmNEU5SzZhK0ZaRGx6N3VQNGd2N2R6Uk4wUUlEQVFBQm80R1QKTUlHUU1Bc0dBMVVkRHdRRUF3SUZvREFVQmdOVkhSRUVEVEFMZ2dsc2IyTmhiR2h2YzNRd0hRWURWUjBsQkJZdwpGQVlJS3dZQkJRVUhBd0VHQ0NzR0FRVUZCd01DTUF3R0ExVWRFd0VCL3dRQ01BQXdIUVlEVlIwT0JCWUVGRHByCnVobXlBNjcwUXdQR1pWT3VaMDlBc2NjV01COEdBMVVkSXdRWU1CYUFGR1F2NFNuMWNxYVhZZFZ6SmZzTlVjYysKTHBWbE1BMEdDU3FHU0liM0RRRUJDd1VBQTRJQkFRQ0tTdmdqbkpEUW5JYzY0a2tRUmVuUnRtVDAweTJ5YlREKwpZenJQU0t6SEhuenhYWTVBeldHdzhYQ1J0WXlldnEzanc3MTg3Q3BITzN4ZFpPcEtudVN4ejhFU29zN09MNjA3CktlM3BNQllYQUgzWlNMZjlIT215SHJIVTJqdERhT2xtM0hHQ25kN2htYXVaQkRFZVlyOUhlaFk4RlZtN3o3a2cKOWN1Si9aN1JxUDEzdVpqRUd3dmlGTWpaa1JzWlJtL1dmNjRpUVk1K0FVMERFYTRHbGFhZ2M1c0ZKTmI5R2hpTwpXV082QkhLWkYyZGpIZDYwblVYMFErQXo4ajE1c29obW5tekJCcTRBZE1qY0VrS2pUbDFMWEdOZ0ExakZFL2xTClFWUzFoalhaY0xXVi9LQ042NjMzY0l3Njd3YkM0UDg5dHYyaTJlKy8wOHViZzMxWE54enIKLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=",
    "encoding": "base64",
    "item": [
        "ca",
        {
            "ca": "self-sign",
            "dns": [
                "localhost"
            ],
            "name": "quadlet_demo"
        }
    ],
    "source": "/etc/pki/tls/certs/quadlet_demo.crt"
}

TASK [fedora.linux_system_roles.certificate : Create return data] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:160
Monday 06 January 2025  13:42:41 -0500 (0:00:01.263)       0:00:12.119 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "certificate_test_certs": {
            "quadlet_demo": {
                "ca": "/etc/pki/tls/certs/quadlet_demo.crt",
                "ca_content": "-----BEGIN CERTIFICATE-----\nMIIDgzCCAmugAwIBAgIRAK4sdI1Rgk/Ai1LyMK9u21AwDQYJKoZIhvcNAQELBQAw\nUDEgMB4GA1UEAwwXTG9jYWwgU2lnbmluZyBBdXRob3JpdHkxLDAqBgNVBAMMI2Fl\nMmM3NDhkLTUxODI0ZmMwLThiNTJmMjMwLWFmNmVkYjRmMB4XDTI1MDEwNjE4NDIz\nOVoXDTI2MDEwNjE4NDIzOVowFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkq\nhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAro5/GvRJebwWFwDFiyUPwNNPN/Po0jSY\nahPT+tk0HCdRUK/Y8y3ZJj3HewT7wHkrtCA1z85F0n5op43DibqZ+AZAdAd8P/rl\noGvZ9DfIXLpRSkcjPqbVLol+V/Ssnf2lSdnTKIq+o8TddEQau0PfZm5KYswhZuvU\n1fKs+fAqEmEN8Reuld9IvFRtBkumkUfdmvSXaU/i+PKJKc/a/a3VZpQ0CqV/s3Xu\nt9pxENiEB0/aEWQbkdRWx90pg8T7PM+M+ReDv3gsAn0pX1hgTrAqwFCkeRBwuByw\ndmeae3rAqMzkLdVROGpGLfuxi3Myf4E9K6a+FZDlz7uP4gv7dzRN0QIDAQABo4GT\nMIGQMAsGA1UdDwQEAwIFoDAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0lBBYw\nFAYIKwYBBQUHAwEGCCsGAQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFDpr\nuhmyA670QwPGZVOuZ09AsccWMB8GA1UdIwQYMBaAFGQv4Sn1cqaXYdVzJfsNUcc+\nLpVlMA0GCSqGSIb3DQEBCwUAA4IBAQCKSvgjnJDQnIc64kkQRenRtmT00y2ybTD+\nYzrPSKzHHnzxXY5AzWGw8XCRtYyevq3jw7187CpHO3xdZOpKnuSxz8ESos7OL607\nKe3pMBYXAH3ZSLf9HOmyHrHU2jtDaOlm3HGCnd7hmauZBDEeYr9HehY8FVm7z7kg\n9cuJ/Z7RqP13uZjEGwviFMjZkRsZRm/Wf64iQY5+AU0DEa4Glaagc5sFJNb9GhiO\nWWO6BHKZF2djHd60nUX0Q+Az8j15sohmnmzBBq4AdMjcEkKjTl1LXGNgA1jFE/lS\nQVS1hjXZcLWV/KCN6633cIw67wbC4P89tv2i2e+/08ubg31XNxzr\n-----END CERTIFICATE-----\n",
                "cert": "/etc/pki/tls/certs/quadlet_demo.crt",
                "cert_content": "-----BEGIN CERTIFICATE-----\nMIIDgzCCAmugAwIBAgIRAK4sdI1Rgk/Ai1LyMK9u21AwDQYJKoZIhvcNAQELBQAw\nUDEgMB4GA1UEAwwXTG9jYWwgU2lnbmluZyBBdXRob3JpdHkxLDAqBgNVBAMMI2Fl\nMmM3NDhkLTUxODI0ZmMwLThiNTJmMjMwLWFmNmVkYjRmMB4XDTI1MDEwNjE4NDIz\nOVoXDTI2MDEwNjE4NDIzOVowFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkq\nhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAro5/GvRJebwWFwDFiyUPwNNPN/Po0jSY\nahPT+tk0HCdRUK/Y8y3ZJj3HewT7wHkrtCA1z85F0n5op43DibqZ+AZAdAd8P/rl\noGvZ9DfIXLpRSkcjPqbVLol+V/Ssnf2lSdnTKIq+o8TddEQau0PfZm5KYswhZuvU\n1fKs+fAqEmEN8Reuld9IvFRtBkumkUfdmvSXaU/i+PKJKc/a/a3VZpQ0CqV/s3Xu\nt9pxENiEB0/aEWQbkdRWx90pg8T7PM+M+ReDv3gsAn0pX1hgTrAqwFCkeRBwuByw\ndmeae3rAqMzkLdVROGpGLfuxi3Myf4E9K6a+FZDlz7uP4gv7dzRN0QIDAQABo4GT\nMIGQMAsGA1UdDwQEAwIFoDAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0lBBYw\nFAYIKwYBBQUHAwEGCCsGAQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFDpr\nuhmyA670QwPGZVOuZ09AsccWMB8GA1UdIwQYMBaAFGQv4Sn1cqaXYdVzJfsNUcc+\nLpVlMA0GCSqGSIb3DQEBCwUAA4IBAQCKSvgjnJDQnIc64kkQRenRtmT00y2ybTD+\nYzrPSKzHHnzxXY5AzWGw8XCRtYyevq3jw7187CpHO3xdZOpKnuSxz8ESos7OL607\nKe3pMBYXAH3ZSLf9HOmyHrHU2jtDaOlm3HGCnd7hmauZBDEeYr9HehY8FVm7z7kg\n9cuJ/Z7RqP13uZjEGwviFMjZkRsZRm/Wf64iQY5+AU0DEa4Glaagc5sFJNb9GhiO\nWWO6BHKZF2djHd60nUX0Q+Az8j15sohmnmzBBq4AdMjcEkKjTl1LXGNgA1jFE/lS\nQVS1hjXZcLWV/KCN6633cIw67wbC4P89tv2i2e+/08ubg31XNxzr\n-----END CERTIFICATE-----\n",
                "key": "/etc/pki/tls/private/quadlet_demo.key",
                "key_content": "-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCujn8a9El5vBYX\nAMWLJQ/A00838+jSNJhqE9P62TQcJ1FQr9jzLdkmPcd7BPvAeSu0IDXPzkXSfmin\njcOJupn4BkB0B3w/+uWga9n0N8hculFKRyM+ptUuiX5X9Kyd/aVJ2dMoir6jxN10\nRBq7Q99mbkpizCFm69TV8qz58CoSYQ3xF66V30i8VG0GS6aRR92a9JdpT+L48okp\nz9r9rdVmlDQKpX+zde632nEQ2IQHT9oRZBuR1FbH3SmDxPs8z4z5F4O/eCwCfSlf\nWGBOsCrAUKR5EHC4HLB2Z5p7esCozOQt1VE4akYt+7GLczJ/gT0rpr4VkOXPu4/i\nC/t3NE3RAgMBAAECggEABKmUto0Y21INSv7/x8gtJArg3k87BEZYXdj6vAQKMshQ\nETYpNtGeL7nNtrj62njCFJUZqtvJHKa4t+T++JJN44STOyYqCPF0KbVHxJCq3uMx\nIalFxToijpQolkPYSn4IaLETRU1zax9qhNz+08qr0GoYvYvTRpL9A9vwB9sE3KnE\nrCergA52eZn2Qls6peQT5fxowlEPGyIHNaFPsdzpEBvibknWXo3PQXcAOdw4BP1q\nnMF/nRXn1CdA3pFCWyM9/0apNmLfX/qTS3gyloB1fUgD/nZ3ewlZ3bdW9GzVQlMB\n3xSlnSFbauGAO3EqTW4UcmlDjQatiLZrXMUcZTe16wKBgQDgJNzc9VQvk49rsvsw\nuNfHXI6N/Arg1Twh28oYgv7m2DUMfHqbWlxv8jDESsWu6j5DyrCcfn/c7B4tfTlE\njpcIwdyHdy1RV2ZWvDIl+90+0HbAgpGAZhMxQxTpJq/rimWgGdCM+OTZDMS2iiSc\n0WvSD6Z1Q6lWwoAiBL7ZZYjjzwKBgQDHXXkYmG6mM78/uZ0XbW1phTVysH7JOPis\n0hPl2EOZgzEU9QLMTos6SjIE+qgE1Ogcpi6nIlEys7x+uIF82y0psCaoxSwa0WmO\njd83zuewhtjmTlIyRqeZzpqepXoUMzRFdGQEHb48LSVEuGtb5lToAMTXgvjz+dyU\nOc3zJlj8XwKBgG54PJJ6Jx+7rjf6YmrE7zRGncvEqOfesGBPnu8twMutS5f30cW3\nZ0+VIgvcfkmTEVUAFMNMZWBQzx7EVPFBBZc/VJ0/U4p/dOTSpDSsSYKn459Fush5\nhoQE981hT9ravvOFRY/1K3T+IgWFWD4hyO4i4Hty+YbVHammhRlIPYtpAoGAYNJQ\ngVVmR2lSNEycskf+nwSGlJCKwobcnnHYIAQLsH46VAc5uKdcjo6CBFEHUHFl2KQM\nfDb6YDlSDejlWhHHuTtHSyeW599T+FuGUMA3XU/f/tmCqdAOjW/9P5b11uGXPTPt\nbuJTBT6XnuxBl3whOBtHfbKp3TM3oPUBxFiXVtkCgYEAwST9lzAFLuJ9tBQp+hOu\nNCqtqay410snDQ8vtvRoplmZ9t4+sYiHB1lTLU3Cuj9x6JxojZr50fCYQECoIU8u\n6wL6TgGe5X/i9aEw5W55JEHNWOHFFGOIo3VOfsXLb3oC0DmFnGtvIpLnszKMzobG\n6IVi1cx+vOxrgK7DjEBC+R0=\n-----END PRIVATE KEY-----\n"
            }
        }
    },
    "changed": false
}

TASK [fedora.linux_system_roles.certificate : Stop tracking certificates] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:176
Monday 06 January 2025  13:42:41 -0500 (0:00:00.083)       0:00:12.202 ******** 
ok: [managed-node3] => (item={'cert': '/etc/pki/tls/certs/quadlet_demo.crt', 'cert_content': '-----BEGIN CERTIFICATE-----\nMIIDgzCCAmugAwIBAgIRAK4sdI1Rgk/Ai1LyMK9u21AwDQYJKoZIhvcNAQELBQAw\nUDEgMB4GA1UEAwwXTG9jYWwgU2lnbmluZyBBdXRob3JpdHkxLDAqBgNVBAMMI2Fl\nMmM3NDhkLTUxODI0ZmMwLThiNTJmMjMwLWFmNmVkYjRmMB4XDTI1MDEwNjE4NDIz\nOVoXDTI2MDEwNjE4NDIzOVowFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkq\nhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAro5/GvRJebwWFwDFiyUPwNNPN/Po0jSY\nahPT+tk0HCdRUK/Y8y3ZJj3HewT7wHkrtCA1z85F0n5op43DibqZ+AZAdAd8P/rl\noGvZ9DfIXLpRSkcjPqbVLol+V/Ssnf2lSdnTKIq+o8TddEQau0PfZm5KYswhZuvU\n1fKs+fAqEmEN8Reuld9IvFRtBkumkUfdmvSXaU/i+PKJKc/a/a3VZpQ0CqV/s3Xu\nt9pxENiEB0/aEWQbkdRWx90pg8T7PM+M+ReDv3gsAn0pX1hgTrAqwFCkeRBwuByw\ndmeae3rAqMzkLdVROGpGLfuxi3Myf4E9K6a+FZDlz7uP4gv7dzRN0QIDAQABo4GT\nMIGQMAsGA1UdDwQEAwIFoDAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0lBBYw\nFAYIKwYBBQUHAwEGCCsGAQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFDpr\nuhmyA670QwPGZVOuZ09AsccWMB8GA1UdIwQYMBaAFGQv4Sn1cqaXYdVzJfsNUcc+\nLpVlMA0GCSqGSIb3DQEBCwUAA4IBAQCKSvgjnJDQnIc64kkQRenRtmT00y2ybTD+\nYzrPSKzHHnzxXY5AzWGw8XCRtYyevq3jw7187CpHO3xdZOpKnuSxz8ESos7OL607\nKe3pMBYXAH3ZSLf9HOmyHrHU2jtDaOlm3HGCnd7hmauZBDEeYr9HehY8FVm7z7kg\n9cuJ/Z7RqP13uZjEGwviFMjZkRsZRm/Wf64iQY5+AU0DEa4Glaagc5sFJNb9GhiO\nWWO6BHKZF2djHd60nUX0Q+Az8j15sohmnmzBBq4AdMjcEkKjTl1LXGNgA1jFE/lS\nQVS1hjXZcLWV/KCN6633cIw67wbC4P89tv2i2e+/08ubg31XNxzr\n-----END CERTIFICATE-----\n', 'key': '/etc/pki/tls/private/quadlet_demo.key', 'key_content': '-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCujn8a9El5vBYX\nAMWLJQ/A00838+jSNJhqE9P62TQcJ1FQr9jzLdkmPcd7BPvAeSu0IDXPzkXSfmin\njcOJupn4BkB0B3w/+uWga9n0N8hculFKRyM+ptUuiX5X9Kyd/aVJ2dMoir6jxN10\nRBq7Q99mbkpizCFm69TV8qz58CoSYQ3xF66V30i8VG0GS6aRR92a9JdpT+L48okp\nz9r9rdVmlDQKpX+zde632nEQ2IQHT9oRZBuR1FbH3SmDxPs8z4z5F4O/eCwCfSlf\nWGBOsCrAUKR5EHC4HLB2Z5p7esCozOQt1VE4akYt+7GLczJ/gT0rpr4VkOXPu4/i\nC/t3NE3RAgMBAAECggEABKmUto0Y21INSv7/x8gtJArg3k87BEZYXdj6vAQKMshQ\nETYpNtGeL7nNtrj62njCFJUZqtvJHKa4t+T++JJN44STOyYqCPF0KbVHxJCq3uMx\nIalFxToijpQolkPYSn4IaLETRU1zax9qhNz+08qr0GoYvYvTRpL9A9vwB9sE3KnE\nrCergA52eZn2Qls6peQT5fxowlEPGyIHNaFPsdzpEBvibknWXo3PQXcAOdw4BP1q\nnMF/nRXn1CdA3pFCWyM9/0apNmLfX/qTS3gyloB1fUgD/nZ3ewlZ3bdW9GzVQlMB\n3xSlnSFbauGAO3EqTW4UcmlDjQatiLZrXMUcZTe16wKBgQDgJNzc9VQvk49rsvsw\nuNfHXI6N/Arg1Twh28oYgv7m2DUMfHqbWlxv8jDESsWu6j5DyrCcfn/c7B4tfTlE\njpcIwdyHdy1RV2ZWvDIl+90+0HbAgpGAZhMxQxTpJq/rimWgGdCM+OTZDMS2iiSc\n0WvSD6Z1Q6lWwoAiBL7ZZYjjzwKBgQDHXXkYmG6mM78/uZ0XbW1phTVysH7JOPis\n0hPl2EOZgzEU9QLMTos6SjIE+qgE1Ogcpi6nIlEys7x+uIF82y0psCaoxSwa0WmO\njd83zuewhtjmTlIyRqeZzpqepXoUMzRFdGQEHb48LSVEuGtb5lToAMTXgvjz+dyU\nOc3zJlj8XwKBgG54PJJ6Jx+7rjf6YmrE7zRGncvEqOfesGBPnu8twMutS5f30cW3\nZ0+VIgvcfkmTEVUAFMNMZWBQzx7EVPFBBZc/VJ0/U4p/dOTSpDSsSYKn459Fush5\nhoQE981hT9ravvOFRY/1K3T+IgWFWD4hyO4i4Hty+YbVHammhRlIPYtpAoGAYNJQ\ngVVmR2lSNEycskf+nwSGlJCKwobcnnHYIAQLsH46VAc5uKdcjo6CBFEHUHFl2KQM\nfDb6YDlSDejlWhHHuTtHSyeW599T+FuGUMA3XU/f/tmCqdAOjW/9P5b11uGXPTPt\nbuJTBT6XnuxBl3whOBtHfbKp3TM3oPUBxFiXVtkCgYEAwST9lzAFLuJ9tBQp+hOu\nNCqtqay410snDQ8vtvRoplmZ9t4+sYiHB1lTLU3Cuj9x6JxojZr50fCYQECoIU8u\n6wL6TgGe5X/i9aEw5W55JEHNWOHFFGOIo3VOfsXLb3oC0DmFnGtvIpLnszKMzobG\n6IVi1cx+vOxrgK7DjEBC+R0=\n-----END PRIVATE KEY-----\n', 'ca': '/etc/pki/tls/certs/quadlet_demo.crt', 'ca_content': '-----BEGIN CERTIFICATE-----\nMIIDgzCCAmugAwIBAgIRAK4sdI1Rgk/Ai1LyMK9u21AwDQYJKoZIhvcNAQELBQAw\nUDEgMB4GA1UEAwwXTG9jYWwgU2lnbmluZyBBdXRob3JpdHkxLDAqBgNVBAMMI2Fl\nMmM3NDhkLTUxODI0ZmMwLThiNTJmMjMwLWFmNmVkYjRmMB4XDTI1MDEwNjE4NDIz\nOVoXDTI2MDEwNjE4NDIzOVowFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkq\nhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAro5/GvRJebwWFwDFiyUPwNNPN/Po0jSY\nahPT+tk0HCdRUK/Y8y3ZJj3HewT7wHkrtCA1z85F0n5op43DibqZ+AZAdAd8P/rl\noGvZ9DfIXLpRSkcjPqbVLol+V/Ssnf2lSdnTKIq+o8TddEQau0PfZm5KYswhZuvU\n1fKs+fAqEmEN8Reuld9IvFRtBkumkUfdmvSXaU/i+PKJKc/a/a3VZpQ0CqV/s3Xu\nt9pxENiEB0/aEWQbkdRWx90pg8T7PM+M+ReDv3gsAn0pX1hgTrAqwFCkeRBwuByw\ndmeae3rAqMzkLdVROGpGLfuxi3Myf4E9K6a+FZDlz7uP4gv7dzRN0QIDAQABo4GT\nMIGQMAsGA1UdDwQEAwIFoDAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0lBBYw\nFAYIKwYBBQUHAwEGCCsGAQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFDpr\nuhmyA670QwPGZVOuZ09AsccWMB8GA1UdIwQYMBaAFGQv4Sn1cqaXYdVzJfsNUcc+\nLpVlMA0GCSqGSIb3DQEBCwUAA4IBAQCKSvgjnJDQnIc64kkQRenRtmT00y2ybTD+\nYzrPSKzHHnzxXY5AzWGw8XCRtYyevq3jw7187CpHO3xdZOpKnuSxz8ESos7OL607\nKe3pMBYXAH3ZSLf9HOmyHrHU2jtDaOlm3HGCnd7hmauZBDEeYr9HehY8FVm7z7kg\n9cuJ/Z7RqP13uZjEGwviFMjZkRsZRm/Wf64iQY5+AU0DEa4Glaagc5sFJNb9GhiO\nWWO6BHKZF2djHd60nUX0Q+Az8j15sohmnmzBBq4AdMjcEkKjTl1LXGNgA1jFE/lS\nQVS1hjXZcLWV/KCN6633cIw67wbC4P89tv2i2e+/08ubg31XNxzr\n-----END CERTIFICATE-----\n'}) => {
    "ansible_loop_var": "item",
    "changed": false,
    "cmd": [
        "getcert",
        "stop-tracking",
        "-f",
        "/etc/pki/tls/certs/quadlet_demo.crt"
    ],
    "delta": "0:00:00.025743",
    "end": "2025-01-06 13:42:42.023611",
    "item": {
        "ca": "/etc/pki/tls/certs/quadlet_demo.crt",
        "ca_content": "-----BEGIN CERTIFICATE-----\nMIIDgzCCAmugAwIBAgIRAK4sdI1Rgk/Ai1LyMK9u21AwDQYJKoZIhvcNAQELBQAw\nUDEgMB4GA1UEAwwXTG9jYWwgU2lnbmluZyBBdXRob3JpdHkxLDAqBgNVBAMMI2Fl\nMmM3NDhkLTUxODI0ZmMwLThiNTJmMjMwLWFmNmVkYjRmMB4XDTI1MDEwNjE4NDIz\nOVoXDTI2MDEwNjE4NDIzOVowFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkq\nhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAro5/GvRJebwWFwDFiyUPwNNPN/Po0jSY\nahPT+tk0HCdRUK/Y8y3ZJj3HewT7wHkrtCA1z85F0n5op43DibqZ+AZAdAd8P/rl\noGvZ9DfIXLpRSkcjPqbVLol+V/Ssnf2lSdnTKIq+o8TddEQau0PfZm5KYswhZuvU\n1fKs+fAqEmEN8Reuld9IvFRtBkumkUfdmvSXaU/i+PKJKc/a/a3VZpQ0CqV/s3Xu\nt9pxENiEB0/aEWQbkdRWx90pg8T7PM+M+ReDv3gsAn0pX1hgTrAqwFCkeRBwuByw\ndmeae3rAqMzkLdVROGpGLfuxi3Myf4E9K6a+FZDlz7uP4gv7dzRN0QIDAQABo4GT\nMIGQMAsGA1UdDwQEAwIFoDAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0lBBYw\nFAYIKwYBBQUHAwEGCCsGAQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFDpr\nuhmyA670QwPGZVOuZ09AsccWMB8GA1UdIwQYMBaAFGQv4Sn1cqaXYdVzJfsNUcc+\nLpVlMA0GCSqGSIb3DQEBCwUAA4IBAQCKSvgjnJDQnIc64kkQRenRtmT00y2ybTD+\nYzrPSKzHHnzxXY5AzWGw8XCRtYyevq3jw7187CpHO3xdZOpKnuSxz8ESos7OL607\nKe3pMBYXAH3ZSLf9HOmyHrHU2jtDaOlm3HGCnd7hmauZBDEeYr9HehY8FVm7z7kg\n9cuJ/Z7RqP13uZjEGwviFMjZkRsZRm/Wf64iQY5+AU0DEa4Glaagc5sFJNb9GhiO\nWWO6BHKZF2djHd60nUX0Q+Az8j15sohmnmzBBq4AdMjcEkKjTl1LXGNgA1jFE/lS\nQVS1hjXZcLWV/KCN6633cIw67wbC4P89tv2i2e+/08ubg31XNxzr\n-----END CERTIFICATE-----\n",
        "cert": "/etc/pki/tls/certs/quadlet_demo.crt",
        "cert_content": "-----BEGIN CERTIFICATE-----\nMIIDgzCCAmugAwIBAgIRAK4sdI1Rgk/Ai1LyMK9u21AwDQYJKoZIhvcNAQELBQAw\nUDEgMB4GA1UEAwwXTG9jYWwgU2lnbmluZyBBdXRob3JpdHkxLDAqBgNVBAMMI2Fl\nMmM3NDhkLTUxODI0ZmMwLThiNTJmMjMwLWFmNmVkYjRmMB4XDTI1MDEwNjE4NDIz\nOVoXDTI2MDEwNjE4NDIzOVowFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkq\nhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAro5/GvRJebwWFwDFiyUPwNNPN/Po0jSY\nahPT+tk0HCdRUK/Y8y3ZJj3HewT7wHkrtCA1z85F0n5op43DibqZ+AZAdAd8P/rl\noGvZ9DfIXLpRSkcjPqbVLol+V/Ssnf2lSdnTKIq+o8TddEQau0PfZm5KYswhZuvU\n1fKs+fAqEmEN8Reuld9IvFRtBkumkUfdmvSXaU/i+PKJKc/a/a3VZpQ0CqV/s3Xu\nt9pxENiEB0/aEWQbkdRWx90pg8T7PM+M+ReDv3gsAn0pX1hgTrAqwFCkeRBwuByw\ndmeae3rAqMzkLdVROGpGLfuxi3Myf4E9K6a+FZDlz7uP4gv7dzRN0QIDAQABo4GT\nMIGQMAsGA1UdDwQEAwIFoDAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0lBBYw\nFAYIKwYBBQUHAwEGCCsGAQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFDpr\nuhmyA670QwPGZVOuZ09AsccWMB8GA1UdIwQYMBaAFGQv4Sn1cqaXYdVzJfsNUcc+\nLpVlMA0GCSqGSIb3DQEBCwUAA4IBAQCKSvgjnJDQnIc64kkQRenRtmT00y2ybTD+\nYzrPSKzHHnzxXY5AzWGw8XCRtYyevq3jw7187CpHO3xdZOpKnuSxz8ESos7OL607\nKe3pMBYXAH3ZSLf9HOmyHrHU2jtDaOlm3HGCnd7hmauZBDEeYr9HehY8FVm7z7kg\n9cuJ/Z7RqP13uZjEGwviFMjZkRsZRm/Wf64iQY5+AU0DEa4Glaagc5sFJNb9GhiO\nWWO6BHKZF2djHd60nUX0Q+Az8j15sohmnmzBBq4AdMjcEkKjTl1LXGNgA1jFE/lS\nQVS1hjXZcLWV/KCN6633cIw67wbC4P89tv2i2e+/08ubg31XNxzr\n-----END CERTIFICATE-----\n",
        "key": "/etc/pki/tls/private/quadlet_demo.key",
        "key_content": "-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCujn8a9El5vBYX\nAMWLJQ/A00838+jSNJhqE9P62TQcJ1FQr9jzLdkmPcd7BPvAeSu0IDXPzkXSfmin\njcOJupn4BkB0B3w/+uWga9n0N8hculFKRyM+ptUuiX5X9Kyd/aVJ2dMoir6jxN10\nRBq7Q99mbkpizCFm69TV8qz58CoSYQ3xF66V30i8VG0GS6aRR92a9JdpT+L48okp\nz9r9rdVmlDQKpX+zde632nEQ2IQHT9oRZBuR1FbH3SmDxPs8z4z5F4O/eCwCfSlf\nWGBOsCrAUKR5EHC4HLB2Z5p7esCozOQt1VE4akYt+7GLczJ/gT0rpr4VkOXPu4/i\nC/t3NE3RAgMBAAECggEABKmUto0Y21INSv7/x8gtJArg3k87BEZYXdj6vAQKMshQ\nETYpNtGeL7nNtrj62njCFJUZqtvJHKa4t+T++JJN44STOyYqCPF0KbVHxJCq3uMx\nIalFxToijpQolkPYSn4IaLETRU1zax9qhNz+08qr0GoYvYvTRpL9A9vwB9sE3KnE\nrCergA52eZn2Qls6peQT5fxowlEPGyIHNaFPsdzpEBvibknWXo3PQXcAOdw4BP1q\nnMF/nRXn1CdA3pFCWyM9/0apNmLfX/qTS3gyloB1fUgD/nZ3ewlZ3bdW9GzVQlMB\n3xSlnSFbauGAO3EqTW4UcmlDjQatiLZrXMUcZTe16wKBgQDgJNzc9VQvk49rsvsw\nuNfHXI6N/Arg1Twh28oYgv7m2DUMfHqbWlxv8jDESsWu6j5DyrCcfn/c7B4tfTlE\njpcIwdyHdy1RV2ZWvDIl+90+0HbAgpGAZhMxQxTpJq/rimWgGdCM+OTZDMS2iiSc\n0WvSD6Z1Q6lWwoAiBL7ZZYjjzwKBgQDHXXkYmG6mM78/uZ0XbW1phTVysH7JOPis\n0hPl2EOZgzEU9QLMTos6SjIE+qgE1Ogcpi6nIlEys7x+uIF82y0psCaoxSwa0WmO\njd83zuewhtjmTlIyRqeZzpqepXoUMzRFdGQEHb48LSVEuGtb5lToAMTXgvjz+dyU\nOc3zJlj8XwKBgG54PJJ6Jx+7rjf6YmrE7zRGncvEqOfesGBPnu8twMutS5f30cW3\nZ0+VIgvcfkmTEVUAFMNMZWBQzx7EVPFBBZc/VJ0/U4p/dOTSpDSsSYKn459Fush5\nhoQE981hT9ravvOFRY/1K3T+IgWFWD4hyO4i4Hty+YbVHammhRlIPYtpAoGAYNJQ\ngVVmR2lSNEycskf+nwSGlJCKwobcnnHYIAQLsH46VAc5uKdcjo6CBFEHUHFl2KQM\nfDb6YDlSDejlWhHHuTtHSyeW599T+FuGUMA3XU/f/tmCqdAOjW/9P5b11uGXPTPt\nbuJTBT6XnuxBl3whOBtHfbKp3TM3oPUBxFiXVtkCgYEAwST9lzAFLuJ9tBQp+hOu\nNCqtqay410snDQ8vtvRoplmZ9t4+sYiHB1lTLU3Cuj9x6JxojZr50fCYQECoIU8u\n6wL6TgGe5X/i9aEw5W55JEHNWOHFFGOIo3VOfsXLb3oC0DmFnGtvIpLnszKMzobG\n6IVi1cx+vOxrgK7DjEBC+R0=\n-----END PRIVATE KEY-----\n"
    },
    "rc": 0,
    "start": "2025-01-06 13:42:41.997868"
}

STDOUT:

Request "20250106184239" removed.

TASK [fedora.linux_system_roles.certificate : Remove files] ********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:181
Monday 06 January 2025  13:42:42 -0500 (0:00:00.586)       0:00:12.788 ******** 
changed: [managed-node3] => (item=/etc/pki/tls/certs/quadlet_demo.crt) => {
    "ansible_loop_var": "item",
    "changed": true,
    "item": "/etc/pki/tls/certs/quadlet_demo.crt",
    "path": "/etc/pki/tls/certs/quadlet_demo.crt",
    "state": "absent"
}
changed: [managed-node3] => (item=/etc/pki/tls/private/quadlet_demo.key) => {
    "ansible_loop_var": "item",
    "changed": true,
    "item": "/etc/pki/tls/private/quadlet_demo.key",
    "path": "/etc/pki/tls/private/quadlet_demo.key",
    "state": "absent"
}
ok: [managed-node3] => (item=/etc/pki/tls/certs/quadlet_demo.crt) => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "/etc/pki/tls/certs/quadlet_demo.crt",
    "path": "/etc/pki/tls/certs/quadlet_demo.crt",
    "state": "absent"
}

TASK [Run the role] ************************************************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:62
Monday 06 January 2025  13:42:43 -0500 (0:00:01.300)       0:00:14.089 ******** 
included: fedora.linux_system_roles.podman for managed-node3

TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3
Monday 06 January 2025  13:42:43 -0500 (0:00:00.139)       0:00:14.229 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] ****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3
Monday 06 January 2025  13:42:43 -0500 (0:00:00.079)       0:00:14.308 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11
Monday 06 January 2025  13:42:43 -0500 (0:00:00.103)       0:00:14.412 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16
Monday 06 January 2025  13:42:44 -0500 (0:00:00.475)       0:00:14.887 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_is_ostree": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23
Monday 06 January 2025  13:42:44 -0500 (0:00:00.037)       0:00:14.925 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28
Monday 06 January 2025  13:42:44 -0500 (0:00:00.384)       0:00:15.310 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_is_transactional": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32
Monday 06 January 2025  13:42:44 -0500 (0:00:00.030)       0:00:15.340 ******** 
ok: [managed-node3] => (item=RedHat.yml) => {
    "ansible_facts": {
        "__podman_packages": [
            "podman",
            "shadow-utils-subid"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml"
}
skipping: [managed-node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "__podman_packages": [
            "iptables-nft",
            "podman",
            "shadow-utils-subid"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}
ok: [managed-node3] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "__podman_packages": [
            "iptables-nft",
            "podman",
            "shadow-utils-subid"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}

TASK [fedora.linux_system_roles.podman : Gather the package facts] *************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
Monday 06 January 2025  13:42:44 -0500 (0:00:00.059)       0:00:15.400 ******** 
ok: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Enable copr if requested] *************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10
Monday 06 January 2025  13:42:45 -0500 (0:00:00.964)       0:00:16.364 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "podman_use_copr | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14
Monday 06 January 2025  13:42:45 -0500 (0:00:00.057)       0:00:16.421 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "(__podman_packages | difference(ansible_facts.packages))",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28
Monday 06 January 2025  13:42:45 -0500 (0:00:00.081)       0:00:16.503 ******** 
skipping: [managed-node3] => {
    "false_condition": "__podman_is_transactional | d(false)"
}

TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33
Monday 06 January 2025  13:42:45 -0500 (0:00:00.113)       0:00:16.616 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_is_transactional | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38
Monday 06 January 2025  13:42:46 -0500 (0:00:00.082)       0:00:16.698 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_is_transactional | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get podman version] *******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46
Monday 06 January 2025  13:42:46 -0500 (0:00:00.058)       0:00:16.757 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "podman",
        "--version"
    ],
    "delta": "0:00:00.026012",
    "end": "2025-01-06 13:42:46.386463",
    "rc": 0,
    "start": "2025-01-06 13:42:46.360451"
}

STDOUT:

podman version 5.3.1

TASK [fedora.linux_system_roles.podman : Set podman version] *******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52
Monday 06 January 2025  13:42:46 -0500 (0:00:00.404)       0:00:17.162 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "podman_version": "5.3.1"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56
Monday 06 January 2025  13:42:46 -0500 (0:00:00.057)       0:00:17.219 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "podman_version is version(\"4.2\", \"<\")",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63
Monday 06 January 2025  13:42:46 -0500 (0:00:00.048)       0:00:17.268 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "podman_version is version(\"4.4\", \"<\")",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73
Monday 06 January 2025  13:42:46 -0500 (0:00:00.156)       0:00:17.424 ******** 
META: end_host conditional evaluated to False, continuing execution for managed-node3
skipping: [managed-node3] => {
    "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3"
}

MSG:

end_host conditional evaluated to false, continuing execution for managed-node3

TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80
Monday 06 January 2025  13:42:46 -0500 (0:00:00.123)       0:00:17.548 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96
Monday 06 January 2025  13:42:46 -0500 (0:00:00.069)       0:00:17.617 ******** 
META: end_host conditional evaluated to False, continuing execution for managed-node3
skipping: [managed-node3] => {
    "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3"
}

MSG:

end_host conditional evaluated to false, continuing execution for managed-node3

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109
Monday 06 January 2025  13:42:46 -0500 (0:00:00.044)       0:00:17.661 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 06 January 2025  13:42:47 -0500 (0:00:00.102)       0:00:17.763 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "getent_passwd": {
            "root": [
                "x",
                "0",
                "0",
                "Super User",
                "/root",
                "/bin/bash"
            ]
        }
    },
    "changed": false
}
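
The lookup above populates ansible_facts["getent_passwd"], which the following tasks consult. A minimal sketch of the same query with the getent module; the hard-coded key reflects this run, where the managed podman user is root:

    - name: Get user information (sketch)
      ansible.builtin.getent:
        database: passwd
        key: root    # substitute the configured podman user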

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 06 January 2025  13:42:47 -0500 (0:00:00.522)       0:00:18.286 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Debug] ********************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 06 January 2025  13:42:47 -0500 (0:00:00.089)       0:00:18.375 ******** 
ok: [managed-node3] => {}

MSG:

item {}

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:20
Monday 06 January 2025  13:42:47 -0500 (0:00:00.088)       0:00:18.464 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_group": "0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35
Monday 06 January 2025  13:42:47 -0500 (0:00:00.102)       0:00:18.567 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1736188868.5129364,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 32,
        "charset": "binary",
        "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97",
        "ctime": 1736188848.6497798,
        "dev": 51714,
        "device_type": 0,
        "executable": true,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 9125782,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "application/x-pie-executable",
        "mode": "0755",
        "mtime": 1730678400.0,
        "nlink": 1,
        "path": "/usr/bin/getsubids",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 15744,
        "uid": 0,
        "version": "4070602005",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": true,
        "xoth": true,
        "xusr": true
    }
}
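
The result above is a plain stat of the getsubids helper. A minimal equivalent sketch, using the register name that the later conditionals in this log reference:

    - name: See if getsubids exists (sketch)
      ansible.builtin.stat:
        path: /usr/bin/getsubids
      register: __podman_stat_getsubids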

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:46
Monday 06 January 2025  13:42:48 -0500 (0:00:00.515)       0:00:19.082 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:51
Monday 06 January 2025  13:42:48 -0500 (0:00:00.099)       0:00:19.182 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}
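
Both checks above are skipped because the user is root. For a rootless user they would invoke the getsubids helper located earlier; a minimal sketch under the assumption that a non-root rootless_user variable is defined (that variable name is illustrative):

    - name: Check with getsubids for user subuids (sketch)
      ansible.builtin.command: getsubids {{ rootless_user }}
      changed_when: false

    - name: Check with getsubids for user subgids (sketch)
      ansible.builtin.command: getsubids -g {{ rootless_user }}
      changed_when: false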

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:56
Monday 06 January 2025  13:42:48 -0500 (0:00:00.096)       0:00:19.278 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:69
Monday 06 January 2025  13:42:48 -0500 (0:00:00.071)       0:00:19.350 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74
Monday 06 January 2025  13:42:48 -0500 (0:00:00.104)       0:00:19.454 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79
Monday 06 January 2025  13:42:48 -0500 (0:00:00.072)       0:00:19.527 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Monday 06 January 2025  13:42:48 -0500 (0:00:00.106)       0:00:19.633 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:96
Monday 06 January 2025  13:42:49 -0500 (0:00:00.085)       0:00:19.718 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set config file paths] ****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115
Monday 06 January 2025  13:42:49 -0500 (0:00:00.059)       0:00:19.778 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf",
        "__podman_policy_json_file": "/etc/containers/policy.json",
        "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf",
        "__podman_storage_conf_file": "/etc/containers/storage.conf"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle container.conf.d] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124
Monday 06 January 2025  13:42:49 -0500 (0:00:00.151)       0:00:19.930 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] ***********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5
Monday 06 January 2025  13:42:49 -0500 (0:00:00.100)       0:00:20.031 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "podman_containers_conf | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Update container config file] *********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13
Monday 06 January 2025  13:42:49 -0500 (0:00:00.097)       0:00:20.128 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "podman_containers_conf | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] *************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127
Monday 06 January 2025  13:42:49 -0500 (0:00:00.049)       0:00:20.177 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] ***********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5
Monday 06 January 2025  13:42:49 -0500 (0:00:00.072)       0:00:20.249 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "podman_registries_conf | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Update registries config file] ********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13
Monday 06 January 2025  13:42:49 -0500 (0:00:00.050)       0:00:20.300 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "podman_registries_conf | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Handle storage.conf] ******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130
Monday 06 January 2025  13:42:49 -0500 (0:00:00.046)       0:00:20.346 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5
Monday 06 January 2025  13:42:49 -0500 (0:00:00.098)       0:00:20.445 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "podman_storage_conf | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Update storage config file] ***********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13
Monday 06 January 2025  13:42:49 -0500 (0:00:00.049)       0:00:20.494 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "podman_storage_conf | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Handle policy.json] *******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133
Monday 06 January 2025  13:42:49 -0500 (0:00:00.045)       0:00:20.540 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6
Monday 06 January 2025  13:42:49 -0500 (0:00:00.102)       0:00:20.642 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "podman_policy_json | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14
Monday 06 January 2025  13:42:49 -0500 (0:00:00.049)       0:00:20.692 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "podman_policy_json | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get the existing policy.json] *********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19
Monday 06 January 2025  13:42:50 -0500 (0:00:00.050)       0:00:20.743 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "podman_policy_json | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Write new policy.json file] ***********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25
Monday 06 January 2025  13:42:50 -0500 (0:00:00.050)       0:00:20.794 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "podman_policy_json | length > 0",
    "skip_reason": "Conditional result was False"
}
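
Every config handler above (containers.conf.d, registries.conf.d, storage.conf, policy.json) is skipped because the corresponding role variable is empty in this test. A hedged sketch of playbook variables that would activate them; the shapes below are assumptions based on the underlying containers.conf, registries.conf, storage.conf, and policy.json formats, so consult the role documentation for the exact schema:

    podman_containers_conf:
      containers:
        log_driver: journald
    podman_registries_conf:
      unqualified-search-registries:
        - quay.io
        - registry.fedoraproject.org
    podman_storage_conf:
      storage:
        runroot: /run/containers/storage
    podman_policy_json:
      default:
        - type: insecureAcceptAnything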

TASK [Manage firewall for specified ports] *************************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139
Monday 06 January 2025  13:42:50 -0500 (0:00:00.051)       0:00:20.845 ******** 
included: fedora.linux_system_roles.firewall for managed-node3
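
The podman role delegates port opening to the firewall role here. A minimal sketch of calling that role directly with the same ports this test ends up enabling (see the per-port results further below); whether the podman role passes the list through this exact mechanism is not visible in the log:

    - name: Manage firewall for specified ports (sketch)
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.firewall
      vars:
        firewall:
          - port: 8000/tcp
            state: enabled
          - port: 9000/tcp
            state: enabled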

TASK [fedora.linux_system_roles.firewall : Setup firewalld] ********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2
Monday 06 January 2025  13:42:50 -0500 (0:00:00.302)       0:00:21.148 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node3

TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2
Monday 06 January 2025  13:42:50 -0500 (0:00:00.156)       0:00:21.304 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__firewall_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Check if system is ostree] **********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10
Monday 06 January 2025  13:42:50 -0500 (0:00:00.161)       0:00:21.466 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15
Monday 06 January 2025  13:42:51 -0500 (0:00:00.459)       0:00:21.926 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__firewall_is_ostree": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22
Monday 06 January 2025  13:42:51 -0500 (0:00:00.067)       0:00:21.994 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:27
Monday 06 January 2025  13:42:51 -0500 (0:00:00.490)       0:00:22.484 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__firewall_is_transactional": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.firewall : Install firewalld] ******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31
Monday 06 January 2025  13:42:51 -0500 (0:00:00.063)       0:00:22.547 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
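
The install step above reported nothing to do because firewalld was already present. A minimal sketch of the same idea with the generic package module; the role may use a distribution-specific module instead:

    - name: Install firewalld (sketch)
      ansible.builtin.package:
        name: firewalld
        state: present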

TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:43
Monday 06 January 2025  13:42:52 -0500 (0:00:00.759)       0:00:23.307 ******** 
skipping: [managed-node3] => {
    "false_condition": "__firewall_is_transactional | d(false)"
}

TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:48
Monday 06 January 2025  13:42:52 -0500 (0:00:00.100)       0:00:23.407 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__firewall_is_transactional | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:53
Monday 06 January 2025  13:42:52 -0500 (0:00:00.102)       0:00:23.510 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__firewall_is_transactional | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Collect service facts] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5
Monday 06 January 2025  13:42:52 -0500 (0:00:00.098)       0:00:23.609 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "firewall_disable_conflicting_services | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:9
Monday 06 January 2025  13:42:52 -0500 (0:00:00.067)       0:00:23.677 ******** 
skipping: [managed-node3] => (item=nftables)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "firewall_disable_conflicting_services | bool",
    "item": "nftables",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=iptables)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "firewall_disable_conflicting_services | bool",
    "item": "iptables",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=ufw)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "firewall_disable_conflicting_services | bool",
    "item": "ufw",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] ***********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:22
Monday 06 January 2025  13:42:53 -0500 (0:00:00.106)       0:00:23.783 ******** 
ok: [managed-node3] => {
    "changed": false,
    "name": "firewalld",
    "status": {
        "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0",
        "ActiveEnterTimestampMonotonic": "0",
        "ActiveExitTimestampMonotonic": "0",
        "ActiveState": "inactive",
        "After": "dbus-broker.service polkit.service system.slice basic.target dbus.socket sysinit.target",
        "AllowIsolate": "no",
        "AssertResult": "no",
        "AssertTimestampMonotonic": "0",
        "Before": "network-pre.target shutdown.target",
        "BlockIOAccounting": "no",
        "BlockIOWeight": "[not set]",
        "BusName": "org.fedoraproject.FirewallD1",
        "CPUAccounting": "yes",
        "CPUAffinityFromNUMA": "no",
        "CPUQuotaPerSecUSec": "infinity",
        "CPUQuotaPeriodUSec": "infinity",
        "CPUSchedulingPolicy": "0",
        "CPUSchedulingPriority": "0",
        "CPUSchedulingResetOnFork": "no",
        "CPUShares": "[not set]",
        "CPUUsageNSec": "[not set]",
        "CPUWeight": "[not set]",
        "CacheDirectoryMode": "0755",
        "CanFreeze": "yes",
        "CanIsolate": "no",
        "CanReload": "yes",
        "CanStart": "yes",
        "CanStop": "yes",
        "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
        "CleanResult": "success",
        "CollectMode": "inactive",
        "ConditionResult": "no",
        "ConditionTimestampMonotonic": "0",
        "ConfigurationDirectoryMode": "0755",
        "Conflicts": "ipset.service shutdown.target ebtables.service ip6tables.service iptables.service",
        "ControlGroupId": "0",
        "ControlPID": "0",
        "CoredumpFilter": "0x33",
        "CoredumpReceive": "no",
        "DefaultDependencies": "yes",
        "DefaultMemoryLow": "0",
        "DefaultMemoryMin": "0",
        "DefaultStartupMemoryLow": "0",
        "Delegate": "no",
        "Description": "firewalld - dynamic firewall daemon",
        "DeviceAllow": "char-rtc r",
        "DevicePolicy": "closed",
        "Documentation": "\"man:firewalld(1)\"",
        "DynamicUser": "no",
        "EffectiveMemoryHigh": "3698229248",
        "EffectiveMemoryMax": "3698229248",
        "EffectiveTasksMax": "22349",
        "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)",
        "ExecMainCode": "0",
        "ExecMainExitTimestampMonotonic": "0",
        "ExecMainHandoffTimestampMonotonic": "0",
        "ExecMainPID": "0",
        "ExecMainStartTimestampMonotonic": "0",
        "ExecMainStatus": "0",
        "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExitType": "main",
        "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "FailureAction": "none",
        "FileDescriptorStoreMax": "0",
        "FileDescriptorStorePreserve": "restart",
        "FinalKillSignal": "9",
        "FragmentPath": "/usr/lib/systemd/system/firewalld.service",
        "FreezerState": "running",
        "GID": "[not set]",
        "GuessMainPID": "yes",
        "IOAccounting": "no",
        "IOReadBytes": "[not set]",
        "IOReadOperations": "[not set]",
        "IOSchedulingClass": "2",
        "IOSchedulingPriority": "4",
        "IOWeight": "[not set]",
        "IOWriteBytes": "[not set]",
        "IOWriteOperations": "[not set]",
        "IPAccounting": "no",
        "IPEgressBytes": "[no data]",
        "IPEgressPackets": "[no data]",
        "IPIngressBytes": "[no data]",
        "IPIngressPackets": "[no data]",
        "Id": "firewalld.service",
        "IgnoreOnIsolate": "no",
        "IgnoreSIGPIPE": "yes",
        "InactiveEnterTimestampMonotonic": "0",
        "InactiveExitTimestampMonotonic": "0",
        "JobRunningTimeoutUSec": "infinity",
        "JobTimeoutAction": "none",
        "JobTimeoutUSec": "infinity",
        "KeyringMode": "private",
        "KillMode": "mixed",
        "KillSignal": "15",
        "LimitAS": "infinity",
        "LimitASSoft": "infinity",
        "LimitCORE": "infinity",
        "LimitCORESoft": "infinity",
        "LimitCPU": "infinity",
        "LimitCPUSoft": "infinity",
        "LimitDATA": "infinity",
        "LimitDATASoft": "infinity",
        "LimitFSIZE": "infinity",
        "LimitFSIZESoft": "infinity",
        "LimitLOCKS": "infinity",
        "LimitLOCKSSoft": "infinity",
        "LimitMEMLOCK": "8388608",
        "LimitMEMLOCKSoft": "8388608",
        "LimitMSGQUEUE": "819200",
        "LimitMSGQUEUESoft": "819200",
        "LimitNICE": "0",
        "LimitNICESoft": "0",
        "LimitNOFILE": "524288",
        "LimitNOFILESoft": "1024",
        "LimitNPROC": "13968",
        "LimitNPROCSoft": "13968",
        "LimitRSS": "infinity",
        "LimitRSSSoft": "infinity",
        "LimitRTPRIO": "0",
        "LimitRTPRIOSoft": "0",
        "LimitRTTIME": "infinity",
        "LimitRTTIMESoft": "infinity",
        "LimitSIGPENDING": "13968",
        "LimitSIGPENDINGSoft": "13968",
        "LimitSTACK": "infinity",
        "LimitSTACKSoft": "8388608",
        "LoadState": "loaded",
        "LockPersonality": "yes",
        "LogLevelMax": "-1",
        "LogRateLimitBurst": "0",
        "LogRateLimitIntervalUSec": "0",
        "LogsDirectoryMode": "0755",
        "MainPID": "0",
        "ManagedOOMMemoryPressure": "auto",
        "ManagedOOMMemoryPressureLimit": "0",
        "ManagedOOMPreference": "none",
        "ManagedOOMSwap": "auto",
        "MemoryAccounting": "yes",
        "MemoryAvailable": "3166248960",
        "MemoryCurrent": "[not set]",
        "MemoryDenyWriteExecute": "yes",
        "MemoryHigh": "infinity",
        "MemoryKSM": "no",
        "MemoryLimit": "infinity",
        "MemoryLow": "0",
        "MemoryMax": "infinity",
        "MemoryMin": "0",
        "MemoryPeak": "[not set]",
        "MemoryPressureThresholdUSec": "200ms",
        "MemoryPressureWatch": "auto",
        "MemorySwapCurrent": "[not set]",
        "MemorySwapMax": "infinity",
        "MemorySwapPeak": "[not set]",
        "MemoryZSwapCurrent": "[not set]",
        "MemoryZSwapMax": "infinity",
        "MemoryZSwapWriteback": "yes",
        "MountAPIVFS": "no",
        "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "NFileDescriptorStore": "0",
        "NRestarts": "0",
        "NUMAPolicy": "n/a",
        "Names": "firewalld.service",
        "NeedDaemonReload": "no",
        "Nice": "0",
        "NoNewPrivileges": "no",
        "NonBlocking": "no",
        "NotifyAccess": "none",
        "OOMPolicy": "stop",
        "OOMScoreAdjust": "0",
        "OnFailureJobMode": "replace",
        "OnSuccessJobMode": "fail",
        "Perpetual": "no",
        "PrivateDevices": "yes",
        "PrivateIPC": "no",
        "PrivateMounts": "no",
        "PrivateNetwork": "no",
        "PrivateTmp": "no",
        "PrivateUsers": "no",
        "ProcSubset": "all",
        "ProtectClock": "yes",
        "ProtectControlGroups": "yes",
        "ProtectHome": "yes",
        "ProtectHostname": "yes",
        "ProtectKernelLogs": "yes",
        "ProtectKernelModules": "no",
        "ProtectKernelTunables": "no",
        "ProtectProc": "default",
        "ProtectSystem": "yes",
        "RefuseManualStart": "no",
        "RefuseManualStop": "no",
        "ReloadResult": "success",
        "ReloadSignal": "1",
        "RemainAfterExit": "no",
        "RemoveIPC": "no",
        "Requires": "dbus.socket system.slice sysinit.target",
        "Restart": "no",
        "RestartKillSignal": "15",
        "RestartMaxDelayUSec": "infinity",
        "RestartMode": "normal",
        "RestartSteps": "0",
        "RestartUSec": "100ms",
        "RestartUSecNext": "100ms",
        "RestrictNamespaces": "no",
        "RestrictRealtime": "yes",
        "RestrictSUIDSGID": "yes",
        "Result": "success",
        "RootDirectoryStartOnly": "no",
        "RootEphemeral": "no",
        "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "RuntimeDirectoryMode": "0755",
        "RuntimeDirectoryPreserve": "no",
        "RuntimeMaxUSec": "infinity",
        "RuntimeRandomizedExtraUSec": "0",
        "SameProcessGroup": "no",
        "SecureBits": "0",
        "SendSIGHUP": "no",
        "SendSIGKILL": "yes",
        "SetLoginEnvironment": "no",
        "Slice": "system.slice",
        "StandardError": "null",
        "StandardInput": "null",
        "StandardOutput": "null",
        "StartLimitAction": "none",
        "StartLimitBurst": "5",
        "StartLimitIntervalUSec": "10s",
        "StartupBlockIOWeight": "[not set]",
        "StartupCPUShares": "[not set]",
        "StartupCPUWeight": "[not set]",
        "StartupIOWeight": "[not set]",
        "StartupMemoryHigh": "infinity",
        "StartupMemoryLow": "0",
        "StartupMemoryMax": "infinity",
        "StartupMemorySwapMax": "infinity",
        "StartupMemoryZSwapMax": "infinity",
        "StateChangeTimestampMonotonic": "0",
        "StateDirectoryMode": "0755",
        "StatusErrno": "0",
        "StopWhenUnneeded": "no",
        "SubState": "dead",
        "SuccessAction": "none",
        "SurviveFinalKillSignal": "no",
        "SyslogFacility": "3",
        "SyslogLevel": "6",
        "SyslogLevelPrefix": "yes",
        "SyslogPriority": "30",
        "SystemCallArchitectures": "native",
        "SystemCallErrorNumber": "2147483646",
        "TTYReset": "no",
        "TTYVHangup": "no",
        "TTYVTDisallocate": "no",
        "TasksAccounting": "yes",
        "TasksCurrent": "[not set]",
        "TasksMax": "22349",
        "TimeoutAbortUSec": "1min 30s",
        "TimeoutCleanUSec": "infinity",
        "TimeoutStartFailureMode": "terminate",
        "TimeoutStartUSec": "1min 30s",
        "TimeoutStopFailureMode": "terminate",
        "TimeoutStopUSec": "1min 30s",
        "TimerSlackNSec": "50000",
        "Transient": "no",
        "Type": "dbus",
        "UID": "[not set]",
        "UMask": "0022",
        "UnitFilePreset": "enabled",
        "UnitFileState": "disabled",
        "UtmpMode": "init",
        "Wants": "network-pre.target",
        "WatchdogSignal": "6",
        "WatchdogTimestampMonotonic": "0",
        "WatchdogUSec": "infinity"
    }
}

TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28
Monday 06 January 2025  13:42:53 -0500 (0:00:00.628)       0:00:24.411 ******** 
changed: [managed-node3] => {
    "changed": true,
    "enabled": true,
    "name": "firewalld",
    "state": "started",
    "status": {
        "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0",
        "ActiveEnterTimestampMonotonic": "0",
        "ActiveExitTimestampMonotonic": "0",
        "ActiveState": "inactive",
        "After": "polkit.service sysinit.target dbus.socket system.slice basic.target dbus-broker.service",
        "AllowIsolate": "no",
        "AssertResult": "no",
        "AssertTimestampMonotonic": "0",
        "Before": "network-pre.target shutdown.target",
        "BlockIOAccounting": "no",
        "BlockIOWeight": "[not set]",
        "BusName": "org.fedoraproject.FirewallD1",
        "CPUAccounting": "yes",
        "CPUAffinityFromNUMA": "no",
        "CPUQuotaPerSecUSec": "infinity",
        "CPUQuotaPeriodUSec": "infinity",
        "CPUSchedulingPolicy": "0",
        "CPUSchedulingPriority": "0",
        "CPUSchedulingResetOnFork": "no",
        "CPUShares": "[not set]",
        "CPUUsageNSec": "[not set]",
        "CPUWeight": "[not set]",
        "CacheDirectoryMode": "0755",
        "CanFreeze": "yes",
        "CanIsolate": "no",
        "CanReload": "yes",
        "CanStart": "yes",
        "CanStop": "yes",
        "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
        "CleanResult": "success",
        "CollectMode": "inactive",
        "ConditionResult": "no",
        "ConditionTimestampMonotonic": "0",
        "ConfigurationDirectoryMode": "0755",
        "Conflicts": "ip6tables.service iptables.service shutdown.target ebtables.service ipset.service",
        "ControlGroupId": "0",
        "ControlPID": "0",
        "CoredumpFilter": "0x33",
        "CoredumpReceive": "no",
        "DefaultDependencies": "yes",
        "DefaultMemoryLow": "0",
        "DefaultMemoryMin": "0",
        "DefaultStartupMemoryLow": "0",
        "Delegate": "no",
        "Description": "firewalld - dynamic firewall daemon",
        "DeviceAllow": "char-rtc r",
        "DevicePolicy": "closed",
        "Documentation": "\"man:firewalld(1)\"",
        "DynamicUser": "no",
        "EffectiveMemoryHigh": "3698229248",
        "EffectiveMemoryMax": "3698229248",
        "EffectiveTasksMax": "22349",
        "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)",
        "ExecMainCode": "0",
        "ExecMainExitTimestampMonotonic": "0",
        "ExecMainHandoffTimestampMonotonic": "0",
        "ExecMainPID": "0",
        "ExecMainStartTimestampMonotonic": "0",
        "ExecMainStatus": "0",
        "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExitType": "main",
        "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "FailureAction": "none",
        "FileDescriptorStoreMax": "0",
        "FileDescriptorStorePreserve": "restart",
        "FinalKillSignal": "9",
        "FragmentPath": "/usr/lib/systemd/system/firewalld.service",
        "FreezerState": "running",
        "GID": "[not set]",
        "GuessMainPID": "yes",
        "IOAccounting": "no",
        "IOReadBytes": "[not set]",
        "IOReadOperations": "[not set]",
        "IOSchedulingClass": "2",
        "IOSchedulingPriority": "4",
        "IOWeight": "[not set]",
        "IOWriteBytes": "[not set]",
        "IOWriteOperations": "[not set]",
        "IPAccounting": "no",
        "IPEgressBytes": "[no data]",
        "IPEgressPackets": "[no data]",
        "IPIngressBytes": "[no data]",
        "IPIngressPackets": "[no data]",
        "Id": "firewalld.service",
        "IgnoreOnIsolate": "no",
        "IgnoreSIGPIPE": "yes",
        "InactiveEnterTimestampMonotonic": "0",
        "InactiveExitTimestampMonotonic": "0",
        "JobRunningTimeoutUSec": "infinity",
        "JobTimeoutAction": "none",
        "JobTimeoutUSec": "infinity",
        "KeyringMode": "private",
        "KillMode": "mixed",
        "KillSignal": "15",
        "LimitAS": "infinity",
        "LimitASSoft": "infinity",
        "LimitCORE": "infinity",
        "LimitCORESoft": "infinity",
        "LimitCPU": "infinity",
        "LimitCPUSoft": "infinity",
        "LimitDATA": "infinity",
        "LimitDATASoft": "infinity",
        "LimitFSIZE": "infinity",
        "LimitFSIZESoft": "infinity",
        "LimitLOCKS": "infinity",
        "LimitLOCKSSoft": "infinity",
        "LimitMEMLOCK": "8388608",
        "LimitMEMLOCKSoft": "8388608",
        "LimitMSGQUEUE": "819200",
        "LimitMSGQUEUESoft": "819200",
        "LimitNICE": "0",
        "LimitNICESoft": "0",
        "LimitNOFILE": "524288",
        "LimitNOFILESoft": "1024",
        "LimitNPROC": "13968",
        "LimitNPROCSoft": "13968",
        "LimitRSS": "infinity",
        "LimitRSSSoft": "infinity",
        "LimitRTPRIO": "0",
        "LimitRTPRIOSoft": "0",
        "LimitRTTIME": "infinity",
        "LimitRTTIMESoft": "infinity",
        "LimitSIGPENDING": "13968",
        "LimitSIGPENDINGSoft": "13968",
        "LimitSTACK": "infinity",
        "LimitSTACKSoft": "8388608",
        "LoadState": "loaded",
        "LockPersonality": "yes",
        "LogLevelMax": "-1",
        "LogRateLimitBurst": "0",
        "LogRateLimitIntervalUSec": "0",
        "LogsDirectoryMode": "0755",
        "MainPID": "0",
        "ManagedOOMMemoryPressure": "auto",
        "ManagedOOMMemoryPressureLimit": "0",
        "ManagedOOMPreference": "none",
        "ManagedOOMSwap": "auto",
        "MemoryAccounting": "yes",
        "MemoryAvailable": "3166748672",
        "MemoryCurrent": "[not set]",
        "MemoryDenyWriteExecute": "yes",
        "MemoryHigh": "infinity",
        "MemoryKSM": "no",
        "MemoryLimit": "infinity",
        "MemoryLow": "0",
        "MemoryMax": "infinity",
        "MemoryMin": "0",
        "MemoryPeak": "[not set]",
        "MemoryPressureThresholdUSec": "200ms",
        "MemoryPressureWatch": "auto",
        "MemorySwapCurrent": "[not set]",
        "MemorySwapMax": "infinity",
        "MemorySwapPeak": "[not set]",
        "MemoryZSwapCurrent": "[not set]",
        "MemoryZSwapMax": "infinity",
        "MemoryZSwapWriteback": "yes",
        "MountAPIVFS": "no",
        "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "NFileDescriptorStore": "0",
        "NRestarts": "0",
        "NUMAPolicy": "n/a",
        "Names": "firewalld.service",
        "NeedDaemonReload": "no",
        "Nice": "0",
        "NoNewPrivileges": "no",
        "NonBlocking": "no",
        "NotifyAccess": "none",
        "OOMPolicy": "stop",
        "OOMScoreAdjust": "0",
        "OnFailureJobMode": "replace",
        "OnSuccessJobMode": "fail",
        "Perpetual": "no",
        "PrivateDevices": "yes",
        "PrivateIPC": "no",
        "PrivateMounts": "no",
        "PrivateNetwork": "no",
        "PrivateTmp": "no",
        "PrivateUsers": "no",
        "ProcSubset": "all",
        "ProtectClock": "yes",
        "ProtectControlGroups": "yes",
        "ProtectHome": "yes",
        "ProtectHostname": "yes",
        "ProtectKernelLogs": "yes",
        "ProtectKernelModules": "no",
        "ProtectKernelTunables": "no",
        "ProtectProc": "default",
        "ProtectSystem": "yes",
        "RefuseManualStart": "no",
        "RefuseManualStop": "no",
        "ReloadResult": "success",
        "ReloadSignal": "1",
        "RemainAfterExit": "no",
        "RemoveIPC": "no",
        "Requires": "system.slice sysinit.target dbus.socket",
        "Restart": "no",
        "RestartKillSignal": "15",
        "RestartMaxDelayUSec": "infinity",
        "RestartMode": "normal",
        "RestartSteps": "0",
        "RestartUSec": "100ms",
        "RestartUSecNext": "100ms",
        "RestrictNamespaces": "no",
        "RestrictRealtime": "yes",
        "RestrictSUIDSGID": "yes",
        "Result": "success",
        "RootDirectoryStartOnly": "no",
        "RootEphemeral": "no",
        "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "RuntimeDirectoryMode": "0755",
        "RuntimeDirectoryPreserve": "no",
        "RuntimeMaxUSec": "infinity",
        "RuntimeRandomizedExtraUSec": "0",
        "SameProcessGroup": "no",
        "SecureBits": "0",
        "SendSIGHUP": "no",
        "SendSIGKILL": "yes",
        "SetLoginEnvironment": "no",
        "Slice": "system.slice",
        "StandardError": "null",
        "StandardInput": "null",
        "StandardOutput": "null",
        "StartLimitAction": "none",
        "StartLimitBurst": "5",
        "StartLimitIntervalUSec": "10s",
        "StartupBlockIOWeight": "[not set]",
        "StartupCPUShares": "[not set]",
        "StartupCPUWeight": "[not set]",
        "StartupIOWeight": "[not set]",
        "StartupMemoryHigh": "infinity",
        "StartupMemoryLow": "0",
        "StartupMemoryMax": "infinity",
        "StartupMemorySwapMax": "infinity",
        "StartupMemoryZSwapMax": "infinity",
        "StateChangeTimestampMonotonic": "0",
        "StateDirectoryMode": "0755",
        "StatusErrno": "0",
        "StopWhenUnneeded": "no",
        "SubState": "dead",
        "SuccessAction": "none",
        "SurviveFinalKillSignal": "no",
        "SyslogFacility": "3",
        "SyslogLevel": "6",
        "SyslogLevelPrefix": "yes",
        "SyslogPriority": "30",
        "SystemCallArchitectures": "native",
        "SystemCallErrorNumber": "2147483646",
        "TTYReset": "no",
        "TTYVHangup": "no",
        "TTYVTDisallocate": "no",
        "TasksAccounting": "yes",
        "TasksCurrent": "[not set]",
        "TasksMax": "22349",
        "TimeoutAbortUSec": "1min 30s",
        "TimeoutCleanUSec": "infinity",
        "TimeoutStartFailureMode": "terminate",
        "TimeoutStartUSec": "1min 30s",
        "TimeoutStopFailureMode": "terminate",
        "TimeoutStopUSec": "1min 30s",
        "TimerSlackNSec": "50000",
        "Transient": "no",
        "Type": "dbus",
        "UID": "[not set]",
        "UMask": "0022",
        "UnitFilePreset": "enabled",
        "UnitFileState": "disabled",
        "UtmpMode": "init",
        "Wants": "network-pre.target",
        "WatchdogSignal": "6",
        "WatchdogTimestampMonotonic": "0",
        "WatchdogUSec": "infinity"
    }
}
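
The two service tasks above first unmask firewalld and then enable and start it. A minimal sketch combining both steps with the systemd_service module; the role performs them as separate tasks, as the log shows:

    - name: Unmask, enable, and start firewalld (sketch)
      ansible.builtin.systemd_service:
        name: firewalld
        masked: false
        enabled: true
        state: started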

TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:34
Monday 06 January 2025  13:42:54 -0500 (0:00:01.115)       0:00:25.527 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__firewall_previous_replaced": false,
        "__firewall_python_cmd": "/usr/bin/python3.12",
        "__firewall_report_changed": true
    },
    "changed": false
}

TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:43
Monday 06 January 2025  13:42:54 -0500 (0:00:00.067)       0:00:25.594 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__firewall_previous_replaced | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:55
Monday 06 January 2025  13:42:54 -0500 (0:00:00.050)       0:00:25.644 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__firewall_previous_replaced | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Configure firewall] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71
Monday 06 January 2025  13:42:54 -0500 (0:00:00.047)       0:00:25.692 ******** 
changed: [managed-node3] => (item={'port': '8000/tcp', 'state': 'enabled'}) => {
    "__firewall_changed": true,
    "ansible_loop_var": "item",
    "changed": true,
    "item": {
        "port": "8000/tcp",
        "state": "enabled"
    }
}
changed: [managed-node3] => (item={'port': '9000/tcp', 'state': 'enabled'}) => {
    "__firewall_changed": true,
    "ansible_loop_var": "item",
    "changed": true,
    "item": {
        "port": "9000/tcp",
        "state": "enabled"
    }
}
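
The per-port changes above are applied by the firewall role. A minimal stand-alone sketch doing the same with ansible.posix.firewalld; the permanent and immediate settings are assumptions about the desired persistence, not something visible in this log:

    - name: Open the demo ports (sketch)
      ansible.posix.firewalld:
        port: "{{ item.port }}"
        state: "{{ item.state }}"
        permanent: true
        immediate: true
      loop:
        - port: 8000/tcp
          state: enabled
        - port: 9000/tcp
          state: enabled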

TASK [fedora.linux_system_roles.firewall : Gather firewall config information] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:120
Monday 06 January 2025  13:42:56 -0500 (0:00:01.414)       0:00:27.107 ******** 
skipping: [managed-node3] => (item={'port': '8000/tcp', 'state': 'enabled'})  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "firewall | length == 1",
    "item": {
        "port": "8000/tcp",
        "state": "enabled"
    },
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item={'port': '9000/tcp', 'state': 'enabled'})  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "firewall | length == 1",
    "item": {
        "port": "9000/tcp",
        "state": "enabled"
    },
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] *******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:130
Monday 06 January 2025  13:42:56 -0500 (0:00:00.126)       0:00:27.234 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "firewall | length == 1",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:139
Monday 06 January 2025  13:42:56 -0500 (0:00:00.074)       0:00:27.308 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "firewall == None or firewall | length == 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] *******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:144
Monday 06 January 2025  13:42:56 -0500 (0:00:00.074)       0:00:27.382 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "firewall == None or firewall | length == 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:153
Monday 06 January 2025  13:42:56 -0500 (0:00:00.077)       0:00:27.460 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__firewall_previous_replaced | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Calculate what has changed] *********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:163
Monday 06 January 2025  13:42:56 -0500 (0:00:00.093)       0:00:27.554 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__firewall_previous_replaced | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Show diffs] *************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:169
Monday 06 January 2025  13:42:56 -0500 (0:00:00.141)       0:00:27.695 ******** 
skipping: [managed-node3] => {
    "false_condition": "__firewall_previous_replaced | bool"
}

TASK [Manage selinux for specified ports] **************************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146
Monday 06 January 2025  13:42:57 -0500 (0:00:00.087)       0:00:27.783 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "podman_selinux_ports | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153
Monday 06 January 2025  13:42:57 -0500 (0:00:00.051)       0:00:27.835 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_cancel_user_linger": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] *******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157
Monday 06 January 2025  13:42:57 -0500 (0:00:00.049)       0:00:27.885 ******** 
skipping: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle credential files - present] ****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166
Monday 06 January 2025  13:42:57 -0500 (0:00:00.045)       0:00:27.930 ******** 
skipping: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle secrets] ***********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175
Monday 06 January 2025  13:42:57 -0500 (0:00:00.044)       0:00:27.975 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node3 => (item=(censored due to no_log))
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node3 => (item=(censored due to no_log))
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node3 => (item=(censored due to no_log))

TASK [fedora.linux_system_roles.podman : Set variables part 1] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3
Monday 06 January 2025  13:42:57 -0500 (0:00:00.310)       0:00:28.285 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_user": "root"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7
Monday 06 January 2025  13:42:57 -0500 (0:00:00.078)       0:00:28.364 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 06 January 2025  13:42:57 -0500 (0:00:00.115)       0:00:28.479 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 06 January 2025  13:42:57 -0500 (0:00:00.039)       0:00:28.519 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Debug] ********************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 06 January 2025  13:42:57 -0500 (0:00:00.037)       0:00:28.556 ******** 
ok: [managed-node3] => {}

MSG:

item {
    "data": "the_root_password_vault_encrypted",
    "name": "mysql-root-password-container",
    "state": "present"
}
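
The debug output above shows the shape of a secret item (name, data, state). Creating such a secret directly would look roughly like the sketch below with the containers.podman.podman_secret module; the role consumes items of this shape from its secrets list, and the password variable name here is a placeholder for the vault-encrypted value this test supplies:

    - name: Create the mysql root password secret (sketch)
      containers.podman.podman_secret:
        name: mysql-root-password-container
        data: "{{ __vaulted_root_password }}"    # placeholder variable, not from this log
        state: present
      no_log: true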

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:20
Monday 06 January 2025  13:42:57 -0500 (0:00:00.038)       0:00:28.595 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_group": "0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35
Monday 06 January 2025  13:42:57 -0500 (0:00:00.064)       0:00:28.659 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:46
Monday 06 January 2025  13:42:58 -0500 (0:00:00.048)       0:00:28.708 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:51
Monday 06 January 2025  13:42:58 -0500 (0:00:00.030)       0:00:28.738 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:56
Monday 06 January 2025  13:42:58 -0500 (0:00:00.029)       0:00:28.768 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:69
Monday 06 January 2025  13:42:58 -0500 (0:00:00.034)       0:00:28.802 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74
Monday 06 January 2025  13:42:58 -0500 (0:00:00.036)       0:00:28.839 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79
Monday 06 January 2025  13:42:58 -0500 (0:00:00.103)       0:00:28.943 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Monday 06 January 2025  13:42:58 -0500 (0:00:00.065)       0:00:29.008 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:96
Monday 06 January 2025  13:42:58 -0500 (0:00:00.071)       0:00:29.080 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set variables part 2] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:13
Monday 06 January 2025  13:42:58 -0500 (0:00:00.050)       0:00:29.131 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_rootless": false,
        "__podman_xdg_runtime_dir": "/run/user/0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:19
Monday 06 January 2025  13:42:58 -0500 (0:00:00.080)       0:00:29.211 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Monday 06 January 2025  13:42:58 -0500 (0:00:00.118)       0:00:29.329 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Monday 06 January 2025  13:42:58 -0500 (0:00:00.052)       0:00:29.382 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Monday 06 January 2025  13:42:58 -0500 (0:00:00.060)       0:00:29.442 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:24
Monday 06 January 2025  13:42:58 -0500 (0:00:00.050)       0:00:29.493 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Manage each secret] *******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:40
Monday 06 January 2025  13:42:58 -0500 (0:00:00.049)       0:00:29.542 ******** 
[WARNING]: Using a variable for a task's 'args' is unsafe in some situations
(see
https://docs.ansible.com/ansible/devel/reference_appendices/faq.html#argsplat-
unsafe)
changed: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": true
}
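
The result is censored because the role runs this task with no_log: true, so the secret payload never reaches the log; the [WARNING] above fires because the task feeds the whole secret specification to the module as variable-driven args. A minimal sketch of an equivalent task, purely for illustration (the containers.podman.podman_secret module choice and the __secret_item loop-variable name are assumptions, not the role's actual source):

    - name: Manage each secret
      containers.podman.podman_secret:
        name: "{{ __secret_item.name }}"    # __secret_item is a hypothetical loop variable
        data: "{{ __secret_item.data }}"
        state: "{{ __secret_item.state }}"
      no_log: true  # keeps the secret data out of the playbook output, as seen above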

TASK [fedora.linux_system_roles.podman : Set variables part 1] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3
Monday 06 January 2025  13:42:59 -0500 (0:00:00.823)       0:00:30.365 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_user": "root"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7
Monday 06 January 2025  13:42:59 -0500 (0:00:00.052)       0:00:30.418 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 06 January 2025  13:42:59 -0500 (0:00:00.089)       0:00:30.507 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 06 January 2025  13:42:59 -0500 (0:00:00.052)       0:00:30.560 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Debug] ********************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 06 January 2025  13:42:59 -0500 (0:00:00.061)       0:00:30.621 ******** 
ok: [managed-node3] => {}

MSG:

item {
    "data": "apiVersion: v1\ndata:\n  password: \"dGhlX3Jvb3RfcGFzc3dvcmRfdmF1bHRfZW5jcnlwdGVk\"\nkind: Secret\nmetadata:\n  name: mysql-root-password-kube\n",
    "name": "mysql-root-password-kube",
    "state": "present"
}
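
With its embedded \n sequences expanded, the item above is the following Kubernetes Secret manifest; the base64 value of password decodes to the same test password ("the_root_password_vault_encrypted") used for the mysql-root-password-container secret earlier in the run:

    apiVersion: v1
    data:
      password: "dGhlX3Jvb3RfcGFzc3dvcmRfdmF1bHRfZW5jcnlwdGVk"
    kind: Secret
    metadata:
      name: mysql-root-password-kube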

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:20
Monday 06 January 2025  13:42:59 -0500 (0:00:00.051)       0:00:30.673 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_group": "0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35
Monday 06 January 2025  13:43:00 -0500 (0:00:00.072)       0:00:30.745 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:46
Monday 06 January 2025  13:43:00 -0500 (0:00:00.085)       0:00:30.831 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:51
Monday 06 January 2025  13:43:00 -0500 (0:00:00.080)       0:00:30.912 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:56
Monday 06 January 2025  13:43:00 -0500 (0:00:00.049)       0:00:30.961 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:69
Monday 06 January 2025  13:43:00 -0500 (0:00:00.096)       0:00:31.058 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74
Monday 06 January 2025  13:43:00 -0500 (0:00:00.060)       0:00:31.119 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79
Monday 06 January 2025  13:43:00 -0500 (0:00:00.062)       0:00:31.181 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Monday 06 January 2025  13:43:00 -0500 (0:00:00.053)       0:00:31.235 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:96
Monday 06 January 2025  13:43:00 -0500 (0:00:00.092)       0:00:31.327 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set variables part 2] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:13
Monday 06 January 2025  13:43:00 -0500 (0:00:00.082)       0:00:31.410 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_rootless": false,
        "__podman_xdg_runtime_dir": "/run/user/0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:19
Monday 06 January 2025  13:43:00 -0500 (0:00:00.100)       0:00:31.510 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Monday 06 January 2025  13:43:00 -0500 (0:00:00.170)       0:00:31.680 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Monday 06 January 2025  13:43:01 -0500 (0:00:00.067)       0:00:31.748 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Monday 06 January 2025  13:43:01 -0500 (0:00:00.049)       0:00:31.797 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:24
Monday 06 January 2025  13:43:01 -0500 (0:00:00.049)       0:00:31.847 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Manage each secret] *******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:40
Monday 06 January 2025  13:43:01 -0500 (0:00:00.072)       0:00:31.920 ******** 
changed: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": true
}

TASK [fedora.linux_system_roles.podman : Set variables part 1] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3
Monday 06 January 2025  13:43:01 -0500 (0:00:00.520)       0:00:32.441 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_user": "root"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7
Monday 06 January 2025  13:43:01 -0500 (0:00:00.034)       0:00:32.476 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 06 January 2025  13:43:01 -0500 (0:00:00.055)       0:00:32.531 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 06 January 2025  13:43:01 -0500 (0:00:00.033)       0:00:32.565 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Debug] ********************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 06 January 2025  13:43:01 -0500 (0:00:00.036)       0:00:32.602 ******** 
ok: [managed-node3] => {}

MSG:

item {
    "data": "apiVersion: v1\ndata:\n  certificate.key: LS0tLS1CRUdJTiBQUklWQVRFIEtFWS0tLS0tCk1JSUV2UUlCQURBTkJna3Foa2lHOXcwQkFRRUZBQVNDQktjd2dnU2pBZ0VBQW9JQkFRQ3VqbjhhOUVsNXZCWVgKQU1XTEpRL0EwMDgzOCtqU05KaHFFOVA2MlRRY0oxRlFyOWp6TGRrbVBjZDdCUHZBZVN1MElEWFB6a1hTZm1pbgpqY09KdXBuNEJrQjBCM3cvK3VXZ2E5bjBOOGhjdWxGS1J5TStwdFV1aVg1WDlLeWQvYVZKMmRNb2lyNmp4TjEwClJCcTdROTltYmtwaXpDRm02OVRWOHF6NThDb1NZUTN4RjY2VjMwaThWRzBHUzZhUlI5MmE5SmRwVCtMNDhva3AKejlyOXJkVm1sRFFLcFgremRlNjMybkVRMklRSFQ5b1JaQnVSMUZiSDNTbUR4UHM4ejR6NUY0Ty9lQ3dDZlNsZgpXR0JPc0NyQVVLUjVFSEM0SExCMlo1cDdlc0Nvek9RdDFWRTRha1l0KzdHTGN6Si9nVDBycHI0VmtPWFB1NC9pCkMvdDNORTNSQWdNQkFBRUNnZ0VBQkttVXRvMFkyMUlOU3Y3L3g4Z3RKQXJnM2s4N0JFWllYZGo2dkFRS01zaFEKRVRZcE50R2VMN25OdHJqNjJuakNGSlVacXR2SkhLYTR0K1QrK0pKTjQ0U1RPeVlxQ1BGMEtiVkh4SkNxM3VNeApJYWxGeFRvaWpwUW9sa1BZU240SWFMRVRSVTF6YXg5cWhOeiswOHFyMEdvWXZZdlRScEw5QTl2d0I5c0UzS25FCnJDZXJnQTUyZVpuMlFsczZwZVFUNWZ4b3dsRVBHeUlITmFGUHNkenBFQnZpYmtuV1hvM1BRWGNBT2R3NEJQMXEKbk1GL25SWG4xQ2RBM3BGQ1d5TTkvMGFwTm1MZlgvcVRTM2d5bG9CMWZVZ0QvblozZXdsWjNiZFc5R3pWUWxNQgozeFNsblNGYmF1R0FPM0VxVFc0VWNtbERqUWF0aUxaclhNVWNaVGUxNndLQmdRRGdKTnpjOVZRdms0OXJzdnN3CnVOZkhYSTZOL0FyZzFUd2gyOG9ZZ3Y3bTJEVU1mSHFiV2x4djhqREVTc1d1Nmo1RHlyQ2Nmbi9jN0I0dGZUbEUKanBjSXdkeUhkeTFSVjJaV3ZESWwrOTArMEhiQWdwR0FaaE14UXhUcEpxL3JpbVdnR2RDTStPVFpETVMyaWlTYwowV3ZTRDZaMVE2bFd3b0FpQkw3WlpZamp6d0tCZ1FESFhYa1ltRzZtTTc4L3VaMFhiVzFwaFRWeXNIN0pPUGlzCjBoUGwyRU9aZ3pFVTlRTE1Ub3M2U2pJRStxZ0UxT2djcGk2bklsRXlzN3grdUlGODJ5MHBzQ2FveFN3YTBXbU8KamQ4M3p1ZXdodGptVGxJeVJxZVp6cHFlcFhvVU16UkZkR1FFSGI0OExTVkV1R3RiNWxUb0FNVFhndmp6K2R5VQpPYzN6SmxqOFh3S0JnRzU0UEpKNkp4KzdyamY2WW1yRTd6UkduY3ZFcU9mZXNHQlBudTh0d011dFM1ZjMwY1czClowK1ZJZ3ZjZmttVEVWVUFGTU5NWldCUXp4N0VWUEZCQlpjL1ZKMC9VNHAvZE9UU3BEU3NTWUtuNDU5RnVzaDUKaG9RRTk4MWhUOXJhdnZPRlJZLzFLM1QrSWdXRldENGh5TzRpNEh0eStZYlZIYW1taFJsSVBZdHBBb0dBWU5KUQpnVlZtUjJsU05FeWNza2YrbndTR2xKQ0t3b2Jjbm5IWUlBUUxzSDQ2VkFjNXVLZGNqbzZDQkZFSFVIRmwyS1FNCmZEYjZZRGxTRGVqbFdoSEh1VHRIU3llVzU5OVQrRnVHVU1BM1hVL2YvdG1DcWRBT2pXLzlQNWIxMXVHWFBUUHQKYnVKVEJUNlhudXhCbDN3aE9CdEhmYktwM1RNM29QVUJ4RmlYVnRrQ2dZRUF3U1Q5bHpBRkx1Sjl0QlFwK2hPdQpOQ3F0cWF5NDEwc25EUTh2dHZSb3BsbVo5dDQrc1lpSEIxbFRMVTNDdWo5eDZKeG9qWnI1MGZDWVFFQ29JVTh1CjZ3TDZUZ0dlNVgvaTlhRXc1VzU1SkVITldPSEZGR09JbzNWT2ZzWExiM29DMERtRm5HdHZJcExuc3pLTXpvYkcKNklWaTFjeCt2T3hyZ0s3RGpFQkMrUjA9Ci0tLS0tRU5EIFBSSVZBVEUgS0VZLS0tLS0K\n  certificate.pem: 
LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURnekNDQW11Z0F3SUJBZ0lSQUs0c2RJMVJnay9BaTFMeU1LOXUyMUF3RFFZSktvWklodmNOQVFFTEJRQXcKVURFZ01CNEdBMVVFQXd3WFRHOWpZV3dnVTJsbmJtbHVaeUJCZFhSb2IzSnBkSGt4TERBcUJnTlZCQU1NSTJGbApNbU0zTkRoa0xUVXhPREkwWm1Nd0xUaGlOVEptTWpNd0xXRm1ObVZrWWpSbU1CNFhEVEkxTURFd05qRTROREl6Ck9Wb1hEVEkyTURFd05qRTROREl6T1Zvd0ZERVNNQkFHQTFVRUF4TUpiRzlqWVd4b2IzTjBNSUlCSWpBTkJna3EKaGtpRzl3MEJBUUVGQUFPQ0FROEFNSUlCQ2dLQ0FRRUFybzUvR3ZSSmVid1dGd0RGaXlVUHdOTlBOL1BvMGpTWQphaFBUK3RrMEhDZFJVSy9ZOHkzWkpqM0hld1Q3d0hrcnRDQTF6ODVGMG41b3A0M0RpYnFaK0FaQWRBZDhQL3JsCm9Hdlo5RGZJWExwUlNrY2pQcWJWTG9sK1YvU3NuZjJsU2RuVEtJcStvOFRkZEVRYXUwUGZabTVLWXN3aFp1dlUKMWZLcytmQXFFbUVOOFJldWxkOUl2RlJ0Qmt1bWtVZmRtdlNYYVUvaStQS0pLYy9hL2EzVlpwUTBDcVYvczNYdQp0OXB4RU5pRUIwL2FFV1Fia2RSV3g5MHBnOFQ3UE0rTStSZUR2M2dzQW4wcFgxaGdUckFxd0ZDa2VSQnd1Qnl3CmRtZWFlM3JBcU16a0xkVlJPR3BHTGZ1eGkzTXlmNEU5SzZhK0ZaRGx6N3VQNGd2N2R6Uk4wUUlEQVFBQm80R1QKTUlHUU1Bc0dBMVVkRHdRRUF3SUZvREFVQmdOVkhSRUVEVEFMZ2dsc2IyTmhiR2h2YzNRd0hRWURWUjBsQkJZdwpGQVlJS3dZQkJRVUhBd0VHQ0NzR0FRVUZCd01DTUF3R0ExVWRFd0VCL3dRQ01BQXdIUVlEVlIwT0JCWUVGRHByCnVobXlBNjcwUXdQR1pWT3VaMDlBc2NjV01COEdBMVVkSXdRWU1CYUFGR1F2NFNuMWNxYVhZZFZ6SmZzTlVjYysKTHBWbE1BMEdDU3FHU0liM0RRRUJDd1VBQTRJQkFRQ0tTdmdqbkpEUW5JYzY0a2tRUmVuUnRtVDAweTJ5YlREKwpZenJQU0t6SEhuenhYWTVBeldHdzhYQ1J0WXlldnEzanc3MTg3Q3BITzN4ZFpPcEtudVN4ejhFU29zN09MNjA3CktlM3BNQllYQUgzWlNMZjlIT215SHJIVTJqdERhT2xtM0hHQ25kN2htYXVaQkRFZVlyOUhlaFk4RlZtN3o3a2cKOWN1Si9aN1JxUDEzdVpqRUd3dmlGTWpaa1JzWlJtL1dmNjRpUVk1K0FVMERFYTRHbGFhZ2M1c0ZKTmI5R2hpTwpXV082QkhLWkYyZGpIZDYwblVYMFErQXo4ajE1c29obW5tekJCcTRBZE1qY0VrS2pUbDFMWEdOZ0ExakZFL2xTClFWUzFoalhaY0xXVi9LQ042NjMzY0l3Njd3YkM0UDg5dHYyaTJlKy8wOHViZzMxWE54enIKLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=\nkind: Secret\nmetadata:\n  name: envoy-certificates\n",
    "name": "envoy-certificates",
    "state": "present"
}
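
Expanded the same way, this item is a Kubernetes Secret named envoy-certificates whose two data fields are the base64-encoded PEM private key and PEM certificate (presumably consumed by the demo's envoy proxy). The values are shortened here for readability; the full base64 blobs appear verbatim in the debug output above:

    apiVersion: v1
    data:
      certificate.key: <base64 of "-----BEGIN PRIVATE KEY-----...", elided>
      certificate.pem: <base64 of "-----BEGIN CERTIFICATE-----...", elided>
    kind: Secret
    metadata:
      name: envoy-certificates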

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:20
Monday 06 January 2025  13:43:01 -0500 (0:00:00.047)       0:00:32.649 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_group": "0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35
Monday 06 January 2025  13:43:02 -0500 (0:00:00.065)       0:00:32.715 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:46
Monday 06 January 2025  13:43:02 -0500 (0:00:00.048)       0:00:32.763 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:51
Monday 06 January 2025  13:43:02 -0500 (0:00:00.085)       0:00:32.849 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:56
Monday 06 January 2025  13:43:02 -0500 (0:00:00.036)       0:00:32.886 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:69
Monday 06 January 2025  13:43:02 -0500 (0:00:00.037)       0:00:32.923 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74
Monday 06 January 2025  13:43:02 -0500 (0:00:00.034)       0:00:32.957 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79
Monday 06 January 2025  13:43:02 -0500 (0:00:00.046)       0:00:33.003 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Monday 06 January 2025  13:43:02 -0500 (0:00:00.052)       0:00:33.056 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:96
Monday 06 January 2025  13:43:02 -0500 (0:00:00.056)       0:00:33.112 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set variables part 2] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:13
Monday 06 January 2025  13:43:02 -0500 (0:00:00.034)       0:00:33.147 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_rootless": false,
        "__podman_xdg_runtime_dir": "/run/user/0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:19
Monday 06 January 2025  13:43:02 -0500 (0:00:00.043)       0:00:33.190 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Monday 06 January 2025  13:43:02 -0500 (0:00:00.062)       0:00:33.253 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Monday 06 January 2025  13:43:02 -0500 (0:00:00.029)       0:00:33.283 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Monday 06 January 2025  13:43:02 -0500 (0:00:00.031)       0:00:33.314 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:24
Monday 06 January 2025  13:43:02 -0500 (0:00:00.029)       0:00:33.344 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Manage each secret] *******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:40
Monday 06 January 2025  13:43:02 -0500 (0:00:00.028)       0:00:33.372 ******** 
changed: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": true
}

TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182
Monday 06 January 2025  13:43:03 -0500 (0:00:00.498)       0:00:33.871 ******** 
skipping: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189
Monday 06 January 2025  13:43:03 -0500 (0:00:00.027)       0:00:33.898 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log))
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log))
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log))
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log))
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log))
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log))

TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Monday 06 January 2025  13:43:03 -0500 (0:00:00.174)       0:00:34.073 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_file_src": "quadlet-demo.network",
        "__podman_quadlet_spec": {},
        "__podman_quadlet_str": "[Network]\nSubnet=192.168.30.0/24\nGateway=192.168.30.1\nLabel=app=wordpress",
        "__podman_quadlet_template_src": ""
    },
    "changed": false
}
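
For readability, the __podman_quadlet_str fact above is the full content of the quadlet-demo.network unit that the role goes on to install under /etc/containers/systemd (see the copy task later in this run):

    [Network]
    Subnet=192.168.30.0/24
    Gateway=192.168.30.1
    Label=app=wordpress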

TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Monday 06 January 2025  13:43:03 -0500 (0:00:00.072)       0:00:34.145 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_continue_if_pull_fails": false,
        "__podman_pull_image": true,
        "__podman_state": "created",
        "__podman_systemd_unit_scope": "",
        "__podman_user": "root"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Monday 06 January 2025  13:43:03 -0500 (0:00:00.091)       0:00:34.237 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_quadlet_file_src",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Monday 06 January 2025  13:43:03 -0500 (0:00:00.033)       0:00:34.270 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_name": "quadlet-demo",
        "__podman_quadlet_type": "network",
        "__podman_rootless": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Monday 06 January 2025  13:43:03 -0500 (0:00:00.049)       0:00:34.320 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 06 January 2025  13:43:03 -0500 (0:00:00.056)       0:00:34.377 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 06 January 2025  13:43:03 -0500 (0:00:00.034)       0:00:34.412 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Debug] ********************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 06 January 2025  13:43:03 -0500 (0:00:00.032)       0:00:34.444 ******** 
ok: [managed-node3] => {}

MSG:

item {
    "file_src": "quadlet-demo.network"
}

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:20
Monday 06 January 2025  13:43:03 -0500 (0:00:00.034)       0:00:34.479 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_group": "0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35
Monday 06 January 2025  13:43:03 -0500 (0:00:00.044)       0:00:34.524 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1736188868.5129364,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 32,
        "charset": "binary",
        "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97",
        "ctime": 1736188848.6497798,
        "dev": 51714,
        "device_type": 0,
        "executable": true,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 9125782,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "application/x-pie-executable",
        "mode": "0755",
        "mtime": 1730678400.0,
        "nlink": 1,
        "path": "/usr/bin/getsubids",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 15744,
        "uid": 0,
        "version": "4070602005",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": true,
        "xoth": true,
        "xusr": true
    }
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:46
Monday 06 January 2025  13:43:04 -0500 (0:00:00.451)       0:00:34.975 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:51
Monday 06 January 2025  13:43:04 -0500 (0:00:00.053)       0:00:35.029 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:56
Monday 06 January 2025  13:43:04 -0500 (0:00:00.058)       0:00:35.088 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:69
Monday 06 January 2025  13:43:04 -0500 (0:00:00.058)       0:00:35.146 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74
Monday 06 January 2025  13:43:04 -0500 (0:00:00.099)       0:00:35.246 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79
Monday 06 January 2025  13:43:04 -0500 (0:00:00.063)       0:00:35.309 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Monday 06 January 2025  13:43:04 -0500 (0:00:00.050)       0:00:35.360 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:96
Monday 06 January 2025  13:43:04 -0500 (0:00:00.049)       0:00:35.410 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Monday 06 January 2025  13:43:04 -0500 (0:00:00.043)       0:00:35.453 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_activate_systemd_unit": true,
        "__podman_images_found": [],
        "__podman_kube_yamls_raw": "",
        "__podman_service_name": "quadlet-demo-network.service",
        "__podman_systemd_scope": "system",
        "__podman_user_home_dir": "/root",
        "__podman_xdg_runtime_dir": "/run/user/0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Monday 06 January 2025  13:43:04 -0500 (0:00:00.062)       0:00:35.516 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_path": "/etc/containers/systemd"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Monday 06 January 2025  13:43:04 -0500 (0:00:00.042)       0:00:35.558 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_kube_yamls_raw | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Monday 06 January 2025  13:43:04 -0500 (0:00:00.058)       0:00:35.617 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_images": [],
        "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.network",
        "__podman_volumes": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Monday 06 January 2025  13:43:05 -0500 (0:00:00.084)       0:00:35.702 ******** 
ok: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Monday 06 January 2025  13:43:05 -0500 (0:00:00.086)       0:00:35.788 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_state == \"absent\"",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
Monday 06 January 2025  13:43:05 -0500 (0:00:00.030)       0:00:35.819 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2
Monday 06 January 2025  13:43:05 -0500 (0:00:00.068)       0:00:35.887 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Monday 06 January 2025  13:43:05 -0500 (0:00:00.050)       0:00:35.937 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Monday 06 January 2025  13:43:05 -0500 (0:00:00.029)       0:00:35.967 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Monday 06 January 2025  13:43:05 -0500 (0:00:00.034)       0:00:36.001 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7
Monday 06 January 2025  13:43:05 -0500 (0:00:00.043)       0:00:36.045 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Monday 06 January 2025  13:43:05 -0500 (0:00:00.051)       0:00:36.097 ******** 
skipping: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Monday 06 January 2025  13:43:05 -0500 (0:00:00.050)       0:00:36.147 ******** 
ok: [managed-node3] => {
    "changed": false,
    "gid": 0,
    "group": "root",
    "mode": "0755",
    "owner": "root",
    "path": "/etc/containers/systemd",
    "secontext": "system_u:object_r:etc_t:s0",
    "size": 6,
    "state": "directory",
    "uid": 0
}

TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48
Monday 06 January 2025  13:43:05 -0500 (0:00:00.406)       0:00:36.553 ******** 
changed: [managed-node3] => {
    "changed": true,
    "checksum": "e57c08d49aff4bae8daab138d913aeddaa8682a0",
    "dest": "/etc/containers/systemd/quadlet-demo.network",
    "gid": 0,
    "group": "root",
    "md5sum": "061f3cf318cbd8ab5794bb1173831fb8",
    "mode": "0644",
    "owner": "root",
    "secontext": "system_u:object_r:etc_t:s0",
    "size": 74,
    "src": "/root/.ansible/tmp/ansible-tmp-1736188985.913371-13096-237285315394433/.source.network",
    "state": "file",
    "uid": 0
}

TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58
Monday 06 January 2025  13:43:06 -0500 (0:00:00.863)       0:00:37.417 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_quadlet_file_src",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70
Monday 06 January 2025  13:43:06 -0500 (0:00:00.032)       0:00:37.449 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_copy_file is skipped",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82
Monday 06 January 2025  13:43:06 -0500 (0:00:00.029)       0:00:37.479 ******** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.podman : Start service] ************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
Monday 06 January 2025  13:43:07 -0500 (0:00:00.744)       0:00:38.223 ******** 
changed: [managed-node3] => {
    "changed": true,
    "name": "quadlet-demo-network.service",
    "state": "started",
    "status": {
        "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0",
        "ActiveEnterTimestampMonotonic": "0",
        "ActiveExitTimestampMonotonic": "0",
        "ActiveState": "inactive",
        "After": "network-online.target sysinit.target systemd-journald.socket basic.target system.slice -.mount",
        "AllowIsolate": "no",
        "AssertResult": "no",
        "AssertTimestampMonotonic": "0",
        "Before": "shutdown.target",
        "BlockIOAccounting": "no",
        "BlockIOWeight": "[not set]",
        "CPUAccounting": "yes",
        "CPUAffinityFromNUMA": "no",
        "CPUQuotaPerSecUSec": "infinity",
        "CPUQuotaPeriodUSec": "infinity",
        "CPUSchedulingPolicy": "0",
        "CPUSchedulingPriority": "0",
        "CPUSchedulingResetOnFork": "no",
        "CPUShares": "[not set]",
        "CPUUsageNSec": "[not set]",
        "CPUWeight": "[not set]",
        "CacheDirectoryMode": "0755",
        "CanFreeze": "yes",
        "CanIsolate": "no",
        "CanReload": "no",
        "CanStart": "yes",
        "CanStop": "yes",
        "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
        "CleanResult": "success",
        "CollectMode": "inactive",
        "ConditionResult": "no",
        "ConditionTimestampMonotonic": "0",
        "ConfigurationDirectoryMode": "0755",
        "Conflicts": "shutdown.target",
        "ControlGroupId": "0",
        "ControlPID": "0",
        "CoredumpFilter": "0x33",
        "CoredumpReceive": "no",
        "DefaultDependencies": "yes",
        "DefaultMemoryLow": "0",
        "DefaultMemoryMin": "0",
        "DefaultStartupMemoryLow": "0",
        "Delegate": "no",
        "Description": "quadlet-demo-network.service",
        "DevicePolicy": "auto",
        "DynamicUser": "no",
        "EffectiveMemoryHigh": "3698229248",
        "EffectiveMemoryMax": "3698229248",
        "EffectiveTasksMax": "22349",
        "ExecMainCode": "0",
        "ExecMainExitTimestampMonotonic": "0",
        "ExecMainHandoffTimestampMonotonic": "0",
        "ExecMainPID": "0",
        "ExecMainStartTimestampMonotonic": "0",
        "ExecMainStatus": "0",
        "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.30.0/24 --gateway 192.168.30.1 --label app=wordpress systemd-quadlet-demo ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.30.0/24 --gateway 192.168.30.1 --label app=wordpress systemd-quadlet-demo ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExitType": "main",
        "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "FailureAction": "none",
        "FileDescriptorStoreMax": "0",
        "FileDescriptorStorePreserve": "restart",
        "FinalKillSignal": "9",
        "FragmentPath": "/run/systemd/generator/quadlet-demo-network.service",
        "FreezerState": "running",
        "GID": "[not set]",
        "GuessMainPID": "yes",
        "IOAccounting": "no",
        "IOReadBytes": "[not set]",
        "IOReadOperations": "[not set]",
        "IOSchedulingClass": "2",
        "IOSchedulingPriority": "4",
        "IOWeight": "[not set]",
        "IOWriteBytes": "[not set]",
        "IOWriteOperations": "[not set]",
        "IPAccounting": "no",
        "IPEgressBytes": "[no data]",
        "IPEgressPackets": "[no data]",
        "IPIngressBytes": "[no data]",
        "IPIngressPackets": "[no data]",
        "Id": "quadlet-demo-network.service",
        "IgnoreOnIsolate": "no",
        "IgnoreSIGPIPE": "yes",
        "InactiveEnterTimestampMonotonic": "0",
        "InactiveExitTimestampMonotonic": "0",
        "JobRunningTimeoutUSec": "infinity",
        "JobTimeoutAction": "none",
        "JobTimeoutUSec": "infinity",
        "KeyringMode": "private",
        "KillMode": "control-group",
        "KillSignal": "15",
        "LimitAS": "infinity",
        "LimitASSoft": "infinity",
        "LimitCORE": "infinity",
        "LimitCORESoft": "infinity",
        "LimitCPU": "infinity",
        "LimitCPUSoft": "infinity",
        "LimitDATA": "infinity",
        "LimitDATASoft": "infinity",
        "LimitFSIZE": "infinity",
        "LimitFSIZESoft": "infinity",
        "LimitLOCKS": "infinity",
        "LimitLOCKSSoft": "infinity",
        "LimitMEMLOCK": "8388608",
        "LimitMEMLOCKSoft": "8388608",
        "LimitMSGQUEUE": "819200",
        "LimitMSGQUEUESoft": "819200",
        "LimitNICE": "0",
        "LimitNICESoft": "0",
        "LimitNOFILE": "524288",
        "LimitNOFILESoft": "1024",
        "LimitNPROC": "13968",
        "LimitNPROCSoft": "13968",
        "LimitRSS": "infinity",
        "LimitRSSSoft": "infinity",
        "LimitRTPRIO": "0",
        "LimitRTPRIOSoft": "0",
        "LimitRTTIME": "infinity",
        "LimitRTTIMESoft": "infinity",
        "LimitSIGPENDING": "13968",
        "LimitSIGPENDINGSoft": "13968",
        "LimitSTACK": "infinity",
        "LimitSTACKSoft": "8388608",
        "LoadState": "loaded",
        "LockPersonality": "no",
        "LogLevelMax": "-1",
        "LogRateLimitBurst": "0",
        "LogRateLimitIntervalUSec": "0",
        "LogsDirectoryMode": "0755",
        "MainPID": "0",
        "ManagedOOMMemoryPressure": "auto",
        "ManagedOOMMemoryPressureLimit": "0",
        "ManagedOOMPreference": "none",
        "ManagedOOMSwap": "auto",
        "MemoryAccounting": "yes",
        "MemoryAvailable": "3186028544",
        "MemoryCurrent": "[not set]",
        "MemoryDenyWriteExecute": "no",
        "MemoryHigh": "infinity",
        "MemoryKSM": "no",
        "MemoryLimit": "infinity",
        "MemoryLow": "0",
        "MemoryMax": "infinity",
        "MemoryMin": "0",
        "MemoryPeak": "[not set]",
        "MemoryPressureThresholdUSec": "200ms",
        "MemoryPressureWatch": "auto",
        "MemorySwapCurrent": "[not set]",
        "MemorySwapMax": "infinity",
        "MemorySwapPeak": "[not set]",
        "MemoryZSwapCurrent": "[not set]",
        "MemoryZSwapMax": "infinity",
        "MemoryZSwapWriteback": "yes",
        "MountAPIVFS": "no",
        "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "NFileDescriptorStore": "0",
        "NRestarts": "0",
        "NUMAPolicy": "n/a",
        "Names": "quadlet-demo-network.service",
        "NeedDaemonReload": "no",
        "Nice": "0",
        "NoNewPrivileges": "no",
        "NonBlocking": "no",
        "NotifyAccess": "none",
        "OOMPolicy": "stop",
        "OOMScoreAdjust": "0",
        "OnFailureJobMode": "replace",
        "OnSuccessJobMode": "fail",
        "Perpetual": "no",
        "PrivateDevices": "no",
        "PrivateIPC": "no",
        "PrivateMounts": "no",
        "PrivateNetwork": "no",
        "PrivateTmp": "no",
        "PrivateUsers": "no",
        "ProcSubset": "all",
        "ProtectClock": "no",
        "ProtectControlGroups": "no",
        "ProtectHome": "no",
        "ProtectHostname": "no",
        "ProtectKernelLogs": "no",
        "ProtectKernelModules": "no",
        "ProtectKernelTunables": "no",
        "ProtectProc": "default",
        "ProtectSystem": "no",
        "RefuseManualStart": "no",
        "RefuseManualStop": "no",
        "ReloadResult": "success",
        "ReloadSignal": "1",
        "RemainAfterExit": "yes",
        "RemoveIPC": "no",
        "Requires": "sysinit.target -.mount system.slice",
        "RequiresMountsFor": "/run/containers",
        "Restart": "no",
        "RestartKillSignal": "15",
        "RestartMaxDelayUSec": "infinity",
        "RestartMode": "normal",
        "RestartSteps": "0",
        "RestartUSec": "100ms",
        "RestartUSecNext": "100ms",
        "RestrictNamespaces": "no",
        "RestrictRealtime": "no",
        "RestrictSUIDSGID": "no",
        "Result": "success",
        "RootDirectoryStartOnly": "no",
        "RootEphemeral": "no",
        "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "RuntimeDirectoryMode": "0755",
        "RuntimeDirectoryPreserve": "no",
        "RuntimeMaxUSec": "infinity",
        "RuntimeRandomizedExtraUSec": "0",
        "SameProcessGroup": "no",
        "SecureBits": "0",
        "SendSIGHUP": "no",
        "SendSIGKILL": "yes",
        "SetLoginEnvironment": "no",
        "Slice": "system.slice",
        "SourcePath": "/etc/containers/systemd/quadlet-demo.network",
        "StandardError": "inherit",
        "StandardInput": "null",
        "StandardOutput": "journal",
        "StartLimitAction": "none",
        "StartLimitBurst": "5",
        "StartLimitIntervalUSec": "10s",
        "StartupBlockIOWeight": "[not set]",
        "StartupCPUShares": "[not set]",
        "StartupCPUWeight": "[not set]",
        "StartupIOWeight": "[not set]",
        "StartupMemoryHigh": "infinity",
        "StartupMemoryLow": "0",
        "StartupMemoryMax": "infinity",
        "StartupMemorySwapMax": "infinity",
        "StartupMemoryZSwapMax": "infinity",
        "StateChangeTimestampMonotonic": "0",
        "StateDirectoryMode": "0755",
        "StatusErrno": "0",
        "StopWhenUnneeded": "no",
        "SubState": "dead",
        "SuccessAction": "none",
        "SurviveFinalKillSignal": "no",
        "SyslogFacility": "3",
        "SyslogIdentifier": "quadlet-demo-network",
        "SyslogLevel": "6",
        "SyslogLevelPrefix": "yes",
        "SyslogPriority": "30",
        "SystemCallErrorNumber": "2147483646",
        "TTYReset": "no",
        "TTYVHangup": "no",
        "TTYVTDisallocate": "no",
        "TasksAccounting": "yes",
        "TasksCurrent": "[not set]",
        "TasksMax": "22349",
        "TimeoutAbortUSec": "1min 30s",
        "TimeoutCleanUSec": "infinity",
        "TimeoutStartFailureMode": "terminate",
        "TimeoutStartUSec": "infinity",
        "TimeoutStopFailureMode": "terminate",
        "TimeoutStopUSec": "1min 30s",
        "TimerSlackNSec": "50000",
        "Transient": "no",
        "Type": "oneshot",
        "UID": "[not set]",
        "UMask": "0022",
        "UnitFilePreset": "disabled",
        "UnitFileState": "generated",
        "UtmpMode": "init",
        "Wants": "network-online.target",
        "WatchdogSignal": "6",
        "WatchdogTimestampMonotonic": "0",
        "WatchdogUSec": "infinity"
    }
}
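
The quadlet source behind this unit, /etc/containers/systemd/quadlet-demo.network (see SourcePath above), is not printed in this log. Judging from the generated ExecStart line, it is roughly equivalent to the following [Network] unit (a reconstruction from the command-line flags, not the literal test file):

    [Network]
    Subnet=192.168.30.0/24
    Gateway=192.168.30.1
    Label=app=wordpress

By default quadlet derives the runtime network name by prefixing systemd- to the unit file name, so quadlet-demo.network yields systemd-quadlet-demo, consistent with the name passed to podman network create above.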

TASK [fedora.linux_system_roles.podman : Restart service] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125
Monday 06 January 2025  13:43:08 -0500 (0:00:00.616)       0:00:38.839 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_service_started is changed",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Monday 06 January 2025  13:43:08 -0500 (0:00:00.031)       0:00:38.871 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_file_src": "quadlet-demo-mysql.volume",
        "__podman_quadlet_spec": {},
        "__podman_quadlet_str": "[Volume]",
        "__podman_quadlet_template_src": ""
    },
    "changed": false
}
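
The quadlet source for this step is trivial: __podman_quadlet_str is just the section header, so the quadlet-demo-mysql.volume file written a few tasks later (9 bytes, i.e. the header plus a trailing newline) contains nothing but:

    [Volume]

With no VolumeName= set, quadlet falls back to its default naming and creates the volume as systemd-quadlet-demo-mysql, which is the name that appears in the generated podman volume create command further down.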

TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Monday 06 January 2025  13:43:08 -0500 (0:00:00.042)       0:00:38.913 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_continue_if_pull_fails": false,
        "__podman_pull_image": true,
        "__podman_state": "created",
        "__podman_systemd_unit_scope": "",
        "__podman_user": "root"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Monday 06 January 2025  13:43:08 -0500 (0:00:00.038)       0:00:38.952 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_quadlet_file_src",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Monday 06 January 2025  13:43:08 -0500 (0:00:00.032)       0:00:38.984 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_name": "quadlet-demo-mysql",
        "__podman_quadlet_type": "volume",
        "__podman_rootless": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Monday 06 January 2025  13:43:08 -0500 (0:00:00.084)       0:00:39.069 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 06 January 2025  13:43:08 -0500 (0:00:00.069)       0:00:39.138 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 06 January 2025  13:43:08 -0500 (0:00:00.045)       0:00:39.184 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Debug] ********************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 06 January 2025  13:43:08 -0500 (0:00:00.055)       0:00:39.240 ******** 
ok: [managed-node3] => {}

MSG:

item {
    "file_src": "quadlet-demo-mysql.volume"
}

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:20
Monday 06 January 2025  13:43:08 -0500 (0:00:00.051)       0:00:39.292 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_group": "0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35
Monday 06 January 2025  13:43:08 -0500 (0:00:00.068)       0:00:39.360 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1736188868.5129364,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 32,
        "charset": "binary",
        "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97",
        "ctime": 1736188848.6497798,
        "dev": 51714,
        "device_type": 0,
        "executable": true,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 9125782,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "application/x-pie-executable",
        "mode": "0755",
        "mtime": 1730678400.0,
        "nlink": 1,
        "path": "/usr/bin/getsubids",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 15744,
        "uid": 0,
        "version": "4070602005",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": true,
        "xoth": true,
        "xusr": true
    }
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:46
Monday 06 January 2025  13:43:09 -0500 (0:00:00.440)       0:00:39.800 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:51
Monday 06 January 2025  13:43:09 -0500 (0:00:00.059)       0:00:39.859 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:56
Monday 06 January 2025  13:43:09 -0500 (0:00:00.065)       0:00:39.925 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:69
Monday 06 January 2025  13:43:09 -0500 (0:00:00.058)       0:00:39.983 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74
Monday 06 January 2025  13:43:09 -0500 (0:00:00.061)       0:00:40.045 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79
Monday 06 January 2025  13:43:09 -0500 (0:00:00.052)       0:00:40.097 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Monday 06 January 2025  13:43:09 -0500 (0:00:00.053)       0:00:40.150 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:96
Monday 06 January 2025  13:43:09 -0500 (0:00:00.049)       0:00:40.199 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Monday 06 January 2025  13:43:09 -0500 (0:00:00.052)       0:00:40.251 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_activate_systemd_unit": true,
        "__podman_images_found": [],
        "__podman_kube_yamls_raw": "",
        "__podman_service_name": "quadlet-demo-mysql-volume.service",
        "__podman_systemd_scope": "system",
        "__podman_user_home_dir": "/root",
        "__podman_xdg_runtime_dir": "/run/user/0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Monday 06 January 2025  13:43:09 -0500 (0:00:00.095)       0:00:40.347 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_path": "/etc/containers/systemd"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Monday 06 January 2025  13:43:09 -0500 (0:00:00.053)       0:00:40.401 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_kube_yamls_raw | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Monday 06 January 2025  13:43:09 -0500 (0:00:00.073)       0:00:40.475 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_images": [],
        "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.volume",
        "__podman_volumes": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Monday 06 January 2025  13:43:09 -0500 (0:00:00.208)       0:00:40.683 ******** 
ok: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Monday 06 January 2025  13:43:10 -0500 (0:00:00.082)       0:00:40.765 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_state == \"absent\"",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
Monday 06 January 2025  13:43:10 -0500 (0:00:00.049)       0:00:40.814 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2
Monday 06 January 2025  13:43:10 -0500 (0:00:00.191)       0:00:41.006 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Monday 06 January 2025  13:43:10 -0500 (0:00:00.068)       0:00:41.074 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Monday 06 January 2025  13:43:10 -0500 (0:00:00.035)       0:00:41.110 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Monday 06 January 2025  13:43:10 -0500 (0:00:00.038)       0:00:41.149 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7
Monday 06 January 2025  13:43:10 -0500 (0:00:00.035)       0:00:41.184 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Monday 06 January 2025  13:43:10 -0500 (0:00:00.029)       0:00:41.213 ******** 
skipping: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Monday 06 January 2025  13:43:10 -0500 (0:00:00.030)       0:00:41.243 ******** 
ok: [managed-node3] => {
    "changed": false,
    "gid": 0,
    "group": "root",
    "mode": "0755",
    "owner": "root",
    "path": "/etc/containers/systemd",
    "secontext": "system_u:object_r:etc_t:s0",
    "size": 34,
    "state": "directory",
    "uid": 0
}

TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48
Monday 06 January 2025  13:43:10 -0500 (0:00:00.410)       0:00:41.653 ******** 
changed: [managed-node3] => {
    "changed": true,
    "checksum": "585f8cbdf0ec73000f9227dcffbef71e9552ea4a",
    "dest": "/etc/containers/systemd/quadlet-demo-mysql.volume",
    "gid": 0,
    "group": "root",
    "md5sum": "5ddd03a022aeb4502d9bc8ce436b4233",
    "mode": "0644",
    "owner": "root",
    "secontext": "system_u:object_r:etc_t:s0",
    "size": 9,
    "src": "/root/.ansible/tmp/ansible-tmp-1736188991.0438864-13301-179211539160969/.source.volume",
    "state": "file",
    "uid": 0
}

TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58
Monday 06 January 2025  13:43:11 -0500 (0:00:00.840)       0:00:42.494 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_quadlet_file_src",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70
Monday 06 January 2025  13:43:11 -0500 (0:00:00.041)       0:00:42.536 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_copy_file is skipped",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82
Monday 06 January 2025  13:43:11 -0500 (0:00:00.034)       0:00:42.571 ******** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.podman : Start service] ************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
Monday 06 January 2025  13:43:12 -0500 (0:00:00.732)       0:00:43.303 ******** 
changed: [managed-node3] => {
    "changed": true,
    "name": "quadlet-demo-mysql-volume.service",
    "state": "started",
    "status": {
        "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0",
        "ActiveEnterTimestampMonotonic": "0",
        "ActiveExitTimestampMonotonic": "0",
        "ActiveState": "inactive",
        "After": "-.mount systemd-journald.socket network-online.target sysinit.target system.slice basic.target",
        "AllowIsolate": "no",
        "AssertResult": "no",
        "AssertTimestampMonotonic": "0",
        "Before": "shutdown.target",
        "BlockIOAccounting": "no",
        "BlockIOWeight": "[not set]",
        "CPUAccounting": "yes",
        "CPUAffinityFromNUMA": "no",
        "CPUQuotaPerSecUSec": "infinity",
        "CPUQuotaPeriodUSec": "infinity",
        "CPUSchedulingPolicy": "0",
        "CPUSchedulingPriority": "0",
        "CPUSchedulingResetOnFork": "no",
        "CPUShares": "[not set]",
        "CPUUsageNSec": "[not set]",
        "CPUWeight": "[not set]",
        "CacheDirectoryMode": "0755",
        "CanFreeze": "yes",
        "CanIsolate": "no",
        "CanReload": "no",
        "CanStart": "yes",
        "CanStop": "yes",
        "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
        "CleanResult": "success",
        "CollectMode": "inactive",
        "ConditionResult": "no",
        "ConditionTimestampMonotonic": "0",
        "ConfigurationDirectoryMode": "0755",
        "Conflicts": "shutdown.target",
        "ControlGroupId": "0",
        "ControlPID": "0",
        "CoredumpFilter": "0x33",
        "CoredumpReceive": "no",
        "DefaultDependencies": "yes",
        "DefaultMemoryLow": "0",
        "DefaultMemoryMin": "0",
        "DefaultStartupMemoryLow": "0",
        "Delegate": "no",
        "Description": "quadlet-demo-mysql-volume.service",
        "DevicePolicy": "auto",
        "DynamicUser": "no",
        "EffectiveMemoryHigh": "3698229248",
        "EffectiveMemoryMax": "3698229248",
        "EffectiveTasksMax": "22349",
        "ExecMainCode": "0",
        "ExecMainExitTimestampMonotonic": "0",
        "ExecMainHandoffTimestampMonotonic": "0",
        "ExecMainPID": "0",
        "ExecMainStartTimestampMonotonic": "0",
        "ExecMainStatus": "0",
        "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-demo-mysql ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-demo-mysql ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExitType": "main",
        "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "FailureAction": "none",
        "FileDescriptorStoreMax": "0",
        "FileDescriptorStorePreserve": "restart",
        "FinalKillSignal": "9",
        "FragmentPath": "/run/systemd/generator/quadlet-demo-mysql-volume.service",
        "FreezerState": "running",
        "GID": "[not set]",
        "GuessMainPID": "yes",
        "IOAccounting": "no",
        "IOReadBytes": "[not set]",
        "IOReadOperations": "[not set]",
        "IOSchedulingClass": "2",
        "IOSchedulingPriority": "4",
        "IOWeight": "[not set]",
        "IOWriteBytes": "[not set]",
        "IOWriteOperations": "[not set]",
        "IPAccounting": "no",
        "IPEgressBytes": "[no data]",
        "IPEgressPackets": "[no data]",
        "IPIngressBytes": "[no data]",
        "IPIngressPackets": "[no data]",
        "Id": "quadlet-demo-mysql-volume.service",
        "IgnoreOnIsolate": "no",
        "IgnoreSIGPIPE": "yes",
        "InactiveEnterTimestampMonotonic": "0",
        "InactiveExitTimestampMonotonic": "0",
        "JobRunningTimeoutUSec": "infinity",
        "JobTimeoutAction": "none",
        "JobTimeoutUSec": "infinity",
        "KeyringMode": "private",
        "KillMode": "control-group",
        "KillSignal": "15",
        "LimitAS": "infinity",
        "LimitASSoft": "infinity",
        "LimitCORE": "infinity",
        "LimitCORESoft": "infinity",
        "LimitCPU": "infinity",
        "LimitCPUSoft": "infinity",
        "LimitDATA": "infinity",
        "LimitDATASoft": "infinity",
        "LimitFSIZE": "infinity",
        "LimitFSIZESoft": "infinity",
        "LimitLOCKS": "infinity",
        "LimitLOCKSSoft": "infinity",
        "LimitMEMLOCK": "8388608",
        "LimitMEMLOCKSoft": "8388608",
        "LimitMSGQUEUE": "819200",
        "LimitMSGQUEUESoft": "819200",
        "LimitNICE": "0",
        "LimitNICESoft": "0",
        "LimitNOFILE": "524288",
        "LimitNOFILESoft": "1024",
        "LimitNPROC": "13968",
        "LimitNPROCSoft": "13968",
        "LimitRSS": "infinity",
        "LimitRSSSoft": "infinity",
        "LimitRTPRIO": "0",
        "LimitRTPRIOSoft": "0",
        "LimitRTTIME": "infinity",
        "LimitRTTIMESoft": "infinity",
        "LimitSIGPENDING": "13968",
        "LimitSIGPENDINGSoft": "13968",
        "LimitSTACK": "infinity",
        "LimitSTACKSoft": "8388608",
        "LoadState": "loaded",
        "LockPersonality": "no",
        "LogLevelMax": "-1",
        "LogRateLimitBurst": "0",
        "LogRateLimitIntervalUSec": "0",
        "LogsDirectoryMode": "0755",
        "MainPID": "0",
        "ManagedOOMMemoryPressure": "auto",
        "ManagedOOMMemoryPressureLimit": "0",
        "ManagedOOMPreference": "none",
        "ManagedOOMSwap": "auto",
        "MemoryAccounting": "yes",
        "MemoryAvailable": "3199680512",
        "MemoryCurrent": "[not set]",
        "MemoryDenyWriteExecute": "no",
        "MemoryHigh": "infinity",
        "MemoryKSM": "no",
        "MemoryLimit": "infinity",
        "MemoryLow": "0",
        "MemoryMax": "infinity",
        "MemoryMin": "0",
        "MemoryPeak": "[not set]",
        "MemoryPressureThresholdUSec": "200ms",
        "MemoryPressureWatch": "auto",
        "MemorySwapCurrent": "[not set]",
        "MemorySwapMax": "infinity",
        "MemorySwapPeak": "[not set]",
        "MemoryZSwapCurrent": "[not set]",
        "MemoryZSwapMax": "infinity",
        "MemoryZSwapWriteback": "yes",
        "MountAPIVFS": "no",
        "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "NFileDescriptorStore": "0",
        "NRestarts": "0",
        "NUMAPolicy": "n/a",
        "Names": "quadlet-demo-mysql-volume.service",
        "NeedDaemonReload": "no",
        "Nice": "0",
        "NoNewPrivileges": "no",
        "NonBlocking": "no",
        "NotifyAccess": "none",
        "OOMPolicy": "stop",
        "OOMScoreAdjust": "0",
        "OnFailureJobMode": "replace",
        "OnSuccessJobMode": "fail",
        "Perpetual": "no",
        "PrivateDevices": "no",
        "PrivateIPC": "no",
        "PrivateMounts": "no",
        "PrivateNetwork": "no",
        "PrivateTmp": "no",
        "PrivateUsers": "no",
        "ProcSubset": "all",
        "ProtectClock": "no",
        "ProtectControlGroups": "no",
        "ProtectHome": "no",
        "ProtectHostname": "no",
        "ProtectKernelLogs": "no",
        "ProtectKernelModules": "no",
        "ProtectKernelTunables": "no",
        "ProtectProc": "default",
        "ProtectSystem": "no",
        "RefuseManualStart": "no",
        "RefuseManualStop": "no",
        "ReloadResult": "success",
        "ReloadSignal": "1",
        "RemainAfterExit": "yes",
        "RemoveIPC": "no",
        "Requires": "-.mount system.slice sysinit.target",
        "RequiresMountsFor": "/run/containers",
        "Restart": "no",
        "RestartKillSignal": "15",
        "RestartMaxDelayUSec": "infinity",
        "RestartMode": "normal",
        "RestartSteps": "0",
        "RestartUSec": "100ms",
        "RestartUSecNext": "100ms",
        "RestrictNamespaces": "no",
        "RestrictRealtime": "no",
        "RestrictSUIDSGID": "no",
        "Result": "success",
        "RootDirectoryStartOnly": "no",
        "RootEphemeral": "no",
        "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "RuntimeDirectoryMode": "0755",
        "RuntimeDirectoryPreserve": "no",
        "RuntimeMaxUSec": "infinity",
        "RuntimeRandomizedExtraUSec": "0",
        "SameProcessGroup": "no",
        "SecureBits": "0",
        "SendSIGHUP": "no",
        "SendSIGKILL": "yes",
        "SetLoginEnvironment": "no",
        "Slice": "system.slice",
        "SourcePath": "/etc/containers/systemd/quadlet-demo-mysql.volume",
        "StandardError": "inherit",
        "StandardInput": "null",
        "StandardOutput": "journal",
        "StartLimitAction": "none",
        "StartLimitBurst": "5",
        "StartLimitIntervalUSec": "10s",
        "StartupBlockIOWeight": "[not set]",
        "StartupCPUShares": "[not set]",
        "StartupCPUWeight": "[not set]",
        "StartupIOWeight": "[not set]",
        "StartupMemoryHigh": "infinity",
        "StartupMemoryLow": "0",
        "StartupMemoryMax": "infinity",
        "StartupMemorySwapMax": "infinity",
        "StartupMemoryZSwapMax": "infinity",
        "StateChangeTimestampMonotonic": "0",
        "StateDirectoryMode": "0755",
        "StatusErrno": "0",
        "StopWhenUnneeded": "no",
        "SubState": "dead",
        "SuccessAction": "none",
        "SurviveFinalKillSignal": "no",
        "SyslogFacility": "3",
        "SyslogIdentifier": "quadlet-demo-mysql-volume",
        "SyslogLevel": "6",
        "SyslogLevelPrefix": "yes",
        "SyslogPriority": "30",
        "SystemCallErrorNumber": "2147483646",
        "TTYReset": "no",
        "TTYVHangup": "no",
        "TTYVTDisallocate": "no",
        "TasksAccounting": "yes",
        "TasksCurrent": "[not set]",
        "TasksMax": "22349",
        "TimeoutAbortUSec": "1min 30s",
        "TimeoutCleanUSec": "infinity",
        "TimeoutStartFailureMode": "terminate",
        "TimeoutStartUSec": "infinity",
        "TimeoutStopFailureMode": "terminate",
        "TimeoutStopUSec": "1min 30s",
        "TimerSlackNSec": "50000",
        "Transient": "no",
        "Type": "oneshot",
        "UID": "[not set]",
        "UMask": "0022",
        "UnitFilePreset": "disabled",
        "UnitFileState": "generated",
        "UtmpMode": "init",
        "Wants": "network-online.target",
        "WatchdogSignal": "6",
        "WatchdogTimestampMonotonic": "0",
        "WatchdogUSec": "infinity"
    }
}
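
This unit is not installed under /etc/systemd/system; the quadlet generator writes it under /run/systemd/generator (see FragmentPath above). Pieced together from the properties in this dump, the salient part of the generated file looks roughly like the following (a reconstruction, not a verbatim copy of the generator output):

    # /run/systemd/generator/quadlet-demo-mysql-volume.service
    [Unit]
    SourcePath=/etc/containers/systemd/quadlet-demo-mysql.volume
    RequiresMountsFor=/run/containers
    Wants=network-online.target

    [Service]
    Type=oneshot
    RemainAfterExit=yes
    ExecStart=/usr/bin/podman volume create --ignore systemd-quadlet-demo-mysql
    SyslogIdentifier=quadlet-demo-mysql-volume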

TASK [fedora.linux_system_roles.podman : Restart service] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125
Monday 06 January 2025  13:43:13 -0500 (0:00:00.617)       0:00:43.920 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_service_started is changed",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Monday 06 January 2025  13:43:13 -0500 (0:00:00.047)       0:00:43.968 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_file_src": "",
        "__podman_quadlet_spec": {},
        "__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Container]\nImage=quay.io/linux-system-roles/mysql:5.6\nContainerName=quadlet-demo-mysql\nVolume=quadlet-demo-mysql.volume:/var/lib/mysql\nVolume=/tmp/quadlet_demo:/var/lib/quadlet_demo:Z\nNetwork=quadlet-demo.network\nSecret=mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD\nHealthCmd=/bin/true\nHealthOnFailure=kill\n",
        "__podman_quadlet_template_src": "quadlet-demo-mysql.container.j2"
    },
    "changed": false
}
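
For readability, the __podman_quadlet_str value above unescapes to the following quadlet container unit, i.e. the content that is templated into /etc/containers/systemd/quadlet-demo-mysql.container a few tasks later:

    [Install]
    WantedBy=default.target

    [Container]
    Image=quay.io/linux-system-roles/mysql:5.6
    ContainerName=quadlet-demo-mysql
    Volume=quadlet-demo-mysql.volume:/var/lib/mysql
    Volume=/tmp/quadlet_demo:/var/lib/quadlet_demo:Z
    Network=quadlet-demo.network
    Secret=mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD
    HealthCmd=/bin/true
    HealthOnFailure=kill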

TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Monday 06 January 2025  13:43:13 -0500 (0:00:00.125)       0:00:44.093 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_continue_if_pull_fails": false,
        "__podman_pull_image": true,
        "__podman_state": "created",
        "__podman_systemd_unit_scope": "",
        "__podman_user": "root"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Monday 06 January 2025  13:43:13 -0500 (0:00:00.050)       0:00:44.144 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_quadlet_str",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Monday 06 January 2025  13:43:13 -0500 (0:00:00.038)       0:00:44.183 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_name": "quadlet-demo-mysql",
        "__podman_quadlet_type": "container",
        "__podman_rootless": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Monday 06 January 2025  13:43:13 -0500 (0:00:00.055)       0:00:44.238 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 06 January 2025  13:43:13 -0500 (0:00:00.113)       0:00:44.351 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 06 January 2025  13:43:13 -0500 (0:00:00.038)       0:00:44.389 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Debug] ********************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 06 January 2025  13:43:13 -0500 (0:00:00.054)       0:00:44.444 ******** 
ok: [managed-node3] => {}

MSG:

item {
    "template_src": "quadlet-demo-mysql.container.j2"
}

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:20
Monday 06 January 2025  13:43:13 -0500 (0:00:00.053)       0:00:44.497 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_group": "0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35
Monday 06 January 2025  13:43:13 -0500 (0:00:00.068)       0:00:44.566 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1736188868.5129364,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 32,
        "charset": "binary",
        "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97",
        "ctime": 1736188848.6497798,
        "dev": 51714,
        "device_type": 0,
        "executable": true,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 9125782,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "application/x-pie-executable",
        "mode": "0755",
        "mtime": 1730678400.0,
        "nlink": 1,
        "path": "/usr/bin/getsubids",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 15744,
        "uid": 0,
        "version": "4070602005",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": true,
        "xoth": true,
        "xusr": true
    }
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:46
Monday 06 January 2025  13:43:14 -0500 (0:00:00.459)       0:00:45.025 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:51
Monday 06 January 2025  13:43:14 -0500 (0:00:00.061)       0:00:45.087 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:56
Monday 06 January 2025  13:43:14 -0500 (0:00:00.038)       0:00:45.125 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:69
Monday 06 January 2025  13:43:14 -0500 (0:00:00.042)       0:00:45.167 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74
Monday 06 January 2025  13:43:14 -0500 (0:00:00.047)       0:00:45.215 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79
Monday 06 January 2025  13:43:14 -0500 (0:00:00.050)       0:00:45.266 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Monday 06 January 2025  13:43:14 -0500 (0:00:00.038)       0:00:45.304 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:96
Monday 06 January 2025  13:43:14 -0500 (0:00:00.041)       0:00:45.345 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Monday 06 January 2025  13:43:14 -0500 (0:00:00.039)       0:00:45.385 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_activate_systemd_unit": true,
        "__podman_images_found": [
            "quay.io/linux-system-roles/mysql:5.6"
        ],
        "__podman_kube_yamls_raw": "",
        "__podman_service_name": "quadlet-demo-mysql.service",
        "__podman_systemd_scope": "system",
        "__podman_user_home_dir": "/root",
        "__podman_xdg_runtime_dir": "/run/user/0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Monday 06 January 2025  13:43:14 -0500 (0:00:00.059)       0:00:45.445 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_path": "/etc/containers/systemd"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Monday 06 January 2025  13:43:14 -0500 (0:00:00.036)       0:00:45.481 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_kube_yamls_raw | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Monday 06 January 2025  13:43:14 -0500 (0:00:00.054)       0:00:45.536 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_images": [
            "quay.io/linux-system-roles/mysql:5.6"
        ],
        "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.container",
        "__podman_volumes": [
            "/tmp/quadlet_demo"
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Monday 06 January 2025  13:43:14 -0500 (0:00:00.114)       0:00:45.650 ******** 
ok: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Monday 06 January 2025  13:43:15 -0500 (0:00:00.098)       0:00:45.749 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_state == \"absent\"",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
Monday 06 January 2025  13:43:15 -0500 (0:00:00.085)       0:00:45.834 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2
Monday 06 January 2025  13:43:15 -0500 (0:00:00.172)       0:00:46.007 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Monday 06 January 2025  13:43:15 -0500 (0:00:00.275)       0:00:46.282 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Monday 06 January 2025  13:43:15 -0500 (0:00:00.087)       0:00:46.370 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Monday 06 January 2025  13:43:15 -0500 (0:00:00.062)       0:00:46.432 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7
Monday 06 January 2025  13:43:15 -0500 (0:00:00.088)       0:00:46.521 ******** 
changed: [managed-node3] => (item=/tmp/quadlet_demo) => {
    "ansible_loop_var": "item",
    "changed": true,
    "gid": 0,
    "group": "root",
    "item": "/tmp/quadlet_demo",
    "mode": "0777",
    "owner": "root",
    "path": "/tmp/quadlet_demo",
    "secontext": "unconfined_u:object_r:user_tmp_t:s0",
    "size": 6,
    "state": "directory",
    "uid": 0
}

TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Monday 06 January 2025  13:43:16 -0500 (0:00:00.466)       0:00:46.988 ******** 
changed: [managed-node3] => (item=None) => {
    "attempts": 1,
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": true
}
changed: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": true
}

TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Monday 06 January 2025  13:43:23 -0500 (0:00:07.134)       0:00:54.122 ******** 
ok: [managed-node3] => {
    "changed": false,
    "gid": 0,
    "group": "root",
    "mode": "0755",
    "owner": "root",
    "path": "/etc/containers/systemd",
    "secontext": "system_u:object_r:etc_t:s0",
    "size": 67,
    "state": "directory",
    "uid": 0
}

TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48
Monday 06 January 2025  13:43:23 -0500 (0:00:00.423)       0:00:54.546 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_quadlet_file_src | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58
Monday 06 January 2025  13:43:23 -0500 (0:00:00.031)       0:00:54.577 ******** 
changed: [managed-node3] => {
    "changed": true,
    "checksum": "ca62b2ad3cc9afb5b5371ebbf797b9bc4fd7edd4",
    "dest": "/etc/containers/systemd/quadlet-demo-mysql.container",
    "gid": 0,
    "group": "root",
    "md5sum": "341b473056d2a5dfa35970b0d2e23a5d",
    "mode": "0644",
    "owner": "root",
    "secontext": "system_u:object_r:etc_t:s0",
    "size": 363,
    "src": "/root/.ansible/tmp/ansible-tmp-1736189003.926421-13821-120873509621603/.source.container",
    "state": "file",
    "uid": 0
}

TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70
Monday 06 January 2025  13:43:24 -0500 (0:00:00.737)       0:00:55.314 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_copy_content is skipped",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82
Monday 06 January 2025  13:43:24 -0500 (0:00:00.061)       0:00:55.375 ******** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.podman : Start service] ************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
Monday 06 January 2025  13:43:25 -0500 (0:00:00.777)       0:00:56.153 ******** 
changed: [managed-node3] => {
    "changed": true,
    "name": "quadlet-demo-mysql.service",
    "state": "started",
    "status": {
        "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0",
        "ActiveEnterTimestampMonotonic": "0",
        "ActiveExitTimestampMonotonic": "0",
        "ActiveState": "inactive",
        "After": "sysinit.target systemd-journald.socket system.slice tmp.mount -.mount network-online.target quadlet-demo-network.service basic.target quadlet-demo-mysql-volume.service",
        "AllowIsolate": "no",
        "AssertResult": "no",
        "AssertTimestampMonotonic": "0",
        "Before": "shutdown.target multi-user.target",
        "BlockIOAccounting": "no",
        "BlockIOWeight": "[not set]",
        "CPUAccounting": "yes",
        "CPUAffinityFromNUMA": "no",
        "CPUQuotaPerSecUSec": "infinity",
        "CPUQuotaPeriodUSec": "infinity",
        "CPUSchedulingPolicy": "0",
        "CPUSchedulingPriority": "0",
        "CPUSchedulingResetOnFork": "no",
        "CPUShares": "[not set]",
        "CPUUsageNSec": "[not set]",
        "CPUWeight": "[not set]",
        "CacheDirectoryMode": "0755",
        "CanFreeze": "yes",
        "CanIsolate": "no",
        "CanReload": "no",
        "CanStart": "yes",
        "CanStop": "yes",
        "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
        "CleanResult": "success",
        "CollectMode": "inactive",
        "ConditionResult": "no",
        "ConditionTimestampMonotonic": "0",
        "ConfigurationDirectoryMode": "0755",
        "Conflicts": "shutdown.target",
        "ControlGroupId": "0",
        "ControlPID": "0",
        "CoredumpFilter": "0x33",
        "CoredumpReceive": "no",
        "DefaultDependencies": "yes",
        "DefaultMemoryLow": "0",
        "DefaultMemoryMin": "0",
        "DefaultStartupMemoryLow": "0",
        "Delegate": "yes",
        "DelegateControllers": "cpu cpuset io memory pids",
        "Description": "quadlet-demo-mysql.service",
        "DevicePolicy": "auto",
        "DynamicUser": "no",
        "EffectiveMemoryHigh": "3698229248",
        "EffectiveMemoryMax": "3698229248",
        "EffectiveTasksMax": "22349",
        "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service",
        "ExecMainCode": "0",
        "ExecMainExitTimestampMonotonic": "0",
        "ExecMainHandoffTimestampMonotonic": "0",
        "ExecMainPID": "0",
        "ExecMainStartTimestampMonotonic": "0",
        "ExecMainStatus": "0",
        "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-demo-mysql --cidfile=/run/quadlet-demo-mysql.cid --replace --rm --cgroups=split --network systemd-quadlet-demo --sdnotify=conmon -d -v systemd-quadlet-demo-mysql:/var/lib/mysql -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z --secret mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD --health-cmd /bin/true --health-on-failure kill quay.io/linux-system-roles/mysql:5.6 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-demo-mysql --cidfile=/run/quadlet-demo-mysql.cid --replace --rm --cgroups=split --network systemd-quadlet-demo --sdnotify=conmon -d -v systemd-quadlet-demo-mysql:/var/lib/mysql -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z --secret mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD --health-cmd /bin/true --health-on-failure kill quay.io/linux-system-roles/mysql:5.6 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; flags=ignore-failure ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExitType": "main",
        "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "FailureAction": "none",
        "FileDescriptorStoreMax": "0",
        "FileDescriptorStorePreserve": "restart",
        "FinalKillSignal": "9",
        "FragmentPath": "/run/systemd/generator/quadlet-demo-mysql.service",
        "FreezerState": "running",
        "GID": "[not set]",
        "GuessMainPID": "yes",
        "IOAccounting": "no",
        "IOReadBytes": "[not set]",
        "IOReadOperations": "[not set]",
        "IOSchedulingClass": "2",
        "IOSchedulingPriority": "4",
        "IOWeight": "[not set]",
        "IOWriteBytes": "[not set]",
        "IOWriteOperations": "[not set]",
        "IPAccounting": "no",
        "IPEgressBytes": "[no data]",
        "IPEgressPackets": "[no data]",
        "IPIngressBytes": "[no data]",
        "IPIngressPackets": "[no data]",
        "Id": "quadlet-demo-mysql.service",
        "IgnoreOnIsolate": "no",
        "IgnoreSIGPIPE": "yes",
        "InactiveEnterTimestampMonotonic": "0",
        "InactiveExitTimestampMonotonic": "0",
        "JobRunningTimeoutUSec": "infinity",
        "JobTimeoutAction": "none",
        "JobTimeoutUSec": "infinity",
        "KeyringMode": "private",
        "KillMode": "mixed",
        "KillSignal": "15",
        "LimitAS": "infinity",
        "LimitASSoft": "infinity",
        "LimitCORE": "infinity",
        "LimitCORESoft": "infinity",
        "LimitCPU": "infinity",
        "LimitCPUSoft": "infinity",
        "LimitDATA": "infinity",
        "LimitDATASoft": "infinity",
        "LimitFSIZE": "infinity",
        "LimitFSIZESoft": "infinity",
        "LimitLOCKS": "infinity",
        "LimitLOCKSSoft": "infinity",
        "LimitMEMLOCK": "8388608",
        "LimitMEMLOCKSoft": "8388608",
        "LimitMSGQUEUE": "819200",
        "LimitMSGQUEUESoft": "819200",
        "LimitNICE": "0",
        "LimitNICESoft": "0",
        "LimitNOFILE": "524288",
        "LimitNOFILESoft": "1024",
        "LimitNPROC": "13968",
        "LimitNPROCSoft": "13968",
        "LimitRSS": "infinity",
        "LimitRSSSoft": "infinity",
        "LimitRTPRIO": "0",
        "LimitRTPRIOSoft": "0",
        "LimitRTTIME": "infinity",
        "LimitRTTIMESoft": "infinity",
        "LimitSIGPENDING": "13968",
        "LimitSIGPENDINGSoft": "13968",
        "LimitSTACK": "infinity",
        "LimitSTACKSoft": "8388608",
        "LoadState": "loaded",
        "LockPersonality": "no",
        "LogLevelMax": "-1",
        "LogRateLimitBurst": "0",
        "LogRateLimitIntervalUSec": "0",
        "LogsDirectoryMode": "0755",
        "MainPID": "0",
        "ManagedOOMMemoryPressure": "auto",
        "ManagedOOMMemoryPressureLimit": "0",
        "ManagedOOMPreference": "none",
        "ManagedOOMSwap": "auto",
        "MemoryAccounting": "yes",
        "MemoryAvailable": "3047419904",
        "MemoryCurrent": "[not set]",
        "MemoryDenyWriteExecute": "no",
        "MemoryHigh": "infinity",
        "MemoryKSM": "no",
        "MemoryLimit": "infinity",
        "MemoryLow": "0",
        "MemoryMax": "infinity",
        "MemoryMin": "0",
        "MemoryPeak": "[not set]",
        "MemoryPressureThresholdUSec": "200ms",
        "MemoryPressureWatch": "auto",
        "MemorySwapCurrent": "[not set]",
        "MemorySwapMax": "infinity",
        "MemorySwapPeak": "[not set]",
        "MemoryZSwapCurrent": "[not set]",
        "MemoryZSwapMax": "infinity",
        "MemoryZSwapWriteback": "yes",
        "MountAPIVFS": "no",
        "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "NFileDescriptorStore": "0",
        "NRestarts": "0",
        "NUMAPolicy": "n/a",
        "Names": "quadlet-demo-mysql.service",
        "NeedDaemonReload": "no",
        "Nice": "0",
        "NoNewPrivileges": "no",
        "NonBlocking": "no",
        "NotifyAccess": "all",
        "OOMPolicy": "continue",
        "OOMScoreAdjust": "0",
        "OnFailureJobMode": "replace",
        "OnSuccessJobMode": "fail",
        "Perpetual": "no",
        "PrivateDevices": "no",
        "PrivateIPC": "no",
        "PrivateMounts": "no",
        "PrivateNetwork": "no",
        "PrivateTmp": "no",
        "PrivateUsers": "no",
        "ProcSubset": "all",
        "ProtectClock": "no",
        "ProtectControlGroups": "no",
        "ProtectHome": "no",
        "ProtectHostname": "no",
        "ProtectKernelLogs": "no",
        "ProtectKernelModules": "no",
        "ProtectKernelTunables": "no",
        "ProtectProc": "default",
        "ProtectSystem": "no",
        "RefuseManualStart": "no",
        "RefuseManualStop": "no",
        "ReloadResult": "success",
        "ReloadSignal": "1",
        "RemainAfterExit": "no",
        "RemoveIPC": "no",
        "Requires": "quadlet-demo-mysql-volume.service system.slice -.mount quadlet-demo-network.service sysinit.target",
        "RequiresMountsFor": "/run/containers /tmp/quadlet_demo",
        "Restart": "no",
        "RestartKillSignal": "15",
        "RestartMaxDelayUSec": "infinity",
        "RestartMode": "normal",
        "RestartSteps": "0",
        "RestartUSec": "100ms",
        "RestartUSecNext": "100ms",
        "RestrictNamespaces": "no",
        "RestrictRealtime": "no",
        "RestrictSUIDSGID": "no",
        "Result": "success",
        "RootDirectoryStartOnly": "no",
        "RootEphemeral": "no",
        "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "RuntimeDirectoryMode": "0755",
        "RuntimeDirectoryPreserve": "no",
        "RuntimeMaxUSec": "infinity",
        "RuntimeRandomizedExtraUSec": "0",
        "SameProcessGroup": "no",
        "SecureBits": "0",
        "SendSIGHUP": "no",
        "SendSIGKILL": "yes",
        "SetLoginEnvironment": "no",
        "Slice": "system.slice",
        "SourcePath": "/etc/containers/systemd/quadlet-demo-mysql.container",
        "StandardError": "inherit",
        "StandardInput": "null",
        "StandardOutput": "journal",
        "StartLimitAction": "none",
        "StartLimitBurst": "5",
        "StartLimitIntervalUSec": "10s",
        "StartupBlockIOWeight": "[not set]",
        "StartupCPUShares": "[not set]",
        "StartupCPUWeight": "[not set]",
        "StartupIOWeight": "[not set]",
        "StartupMemoryHigh": "infinity",
        "StartupMemoryLow": "0",
        "StartupMemoryMax": "infinity",
        "StartupMemorySwapMax": "infinity",
        "StartupMemoryZSwapMax": "infinity",
        "StateChangeTimestampMonotonic": "0",
        "StateDirectoryMode": "0755",
        "StatusErrno": "0",
        "StopWhenUnneeded": "no",
        "SubState": "dead",
        "SuccessAction": "none",
        "SurviveFinalKillSignal": "no",
        "SyslogFacility": "3",
        "SyslogIdentifier": "quadlet-demo-mysql",
        "SyslogLevel": "6",
        "SyslogLevelPrefix": "yes",
        "SyslogPriority": "30",
        "SystemCallErrorNumber": "2147483646",
        "TTYReset": "no",
        "TTYVHangup": "no",
        "TTYVTDisallocate": "no",
        "TasksAccounting": "yes",
        "TasksCurrent": "[not set]",
        "TasksMax": "22349",
        "TimeoutAbortUSec": "1min 30s",
        "TimeoutCleanUSec": "infinity",
        "TimeoutStartFailureMode": "terminate",
        "TimeoutStartUSec": "1min 30s",
        "TimeoutStopFailureMode": "terminate",
        "TimeoutStopUSec": "1min 30s",
        "TimerSlackNSec": "50000",
        "Transient": "no",
        "Type": "notify",
        "UID": "[not set]",
        "UMask": "0022",
        "UnitFilePreset": "disabled",
        "UnitFileState": "generated",
        "UtmpMode": "init",
        "WantedBy": "multi-user.target",
        "Wants": "network-online.target",
        "WatchdogSignal": "6",
        "WatchdogTimestampMonotonic": "0",
        "WatchdogUSec": "infinity"
    }
}
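
Note for readers unfamiliar with Quadlet: the quadlet-demo-mysql.service started above was not written by hand; systemd's quadlet generator produced it (FragmentPath under /run/systemd/generator/) from /etc/containers/systemd/quadlet-demo-mysql.container, which was copied a few tasks earlier in this run. The exact file contents are not printed in the log, so the following is only a plausible sketch reconstructed from the generated ExecStart shown in the status output, using key names from podman-systemd.unit(5); the Requires=/After= on quadlet-demo-mysql-volume.service and quadlet-demo-network.service are added automatically by the generator for the referenced .volume/.network units.

    [Install]
    WantedBy=multi-user.target

    [Container]
    Image=quay.io/linux-system-roles/mysql:5.6
    ContainerName=quadlet-demo-mysql
    # Named volume from quadlet-demo-mysql.volume -> podman volume "systemd-quadlet-demo-mysql"
    Volume=quadlet-demo-mysql.volume:/var/lib/mysql
    Volume=/tmp/quadlet_demo:/var/lib/quadlet_demo:Z
    # Network from quadlet-demo.network -> podman network "systemd-quadlet-demo"
    Network=quadlet-demo.network
    Secret=mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD
    HealthCmd=/bin/true
    HealthOnFailure=kill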

TASK [fedora.linux_system_roles.podman : Restart service] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125
Monday 06 January 2025  13:43:26 -0500 (0:00:01.021)       0:00:57.174 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_service_started is changed",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Monday 06 January 2025  13:43:26 -0500 (0:00:00.056)       0:00:57.231 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_file_src": "envoy-proxy-configmap.yml",
        "__podman_quadlet_spec": {},
        "__podman_quadlet_str": "---\napiVersion: v1\nkind: ConfigMap\nmetadata:\n  name: envoy-proxy-config\ndata:\n  envoy.yaml: |\n    admin:\n      address:\n        socket_address:\n          address: 0.0.0.0\n          port_value: 9901\n\n    static_resources:\n      listeners:\n      - name: listener_0\n        address:\n          socket_address:\n            address: 0.0.0.0\n            port_value: 8080\n        filter_chains:\n        - filters:\n          - name: envoy.filters.network.http_connection_manager\n            typed_config:\n              \"@type\": type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager\n              stat_prefix: ingress_http\n              codec_type: AUTO\n              route_config:\n                name: local_route\n                virtual_hosts:\n                - name: local_service\n                  domains: [\"*\"]\n                  routes:\n                  - match:\n                      prefix: \"/\"\n                    route:\n                      cluster: backend\n              http_filters:\n              - name: envoy.filters.http.router\n                typed_config:\n                  \"@type\": type.googleapis.com/envoy.extensions.filters.http.router.v3.Router\n          transport_socket:\n            name: envoy.transport_sockets.tls\n            typed_config:\n              \"@type\": type.googleapis.com/envoy.extensions.transport_sockets.tls.v3.DownstreamTlsContext\n              common_tls_context:\n                tls_certificates:\n                - certificate_chain:\n                    filename: /etc/envoy-certificates/certificate.pem\n                  private_key:\n                    filename: /etc/envoy-certificates/certificate.key\n      clusters:\n      - name: backend\n        connect_timeout: 5s\n        type: STATIC\n        dns_refresh_rate: 1800s\n        lb_policy: ROUND_ROBIN\n        load_assignment:\n          cluster_name: backend\n          endpoints:\n          - lb_endpoints:\n            - endpoint:\n                address:\n                  socket_address:\n                    address: 127.0.0.1\n                    port_value: 80",
        "__podman_quadlet_template_src": ""
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Monday 06 January 2025  13:43:26 -0500 (0:00:00.082)       0:00:57.313 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_continue_if_pull_fails": false,
        "__podman_pull_image": true,
        "__podman_state": "created",
        "__podman_systemd_unit_scope": "",
        "__podman_user": "root"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Monday 06 January 2025  13:43:26 -0500 (0:00:00.058)       0:00:57.372 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_quadlet_file_src",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Monday 06 January 2025  13:43:26 -0500 (0:00:00.036)       0:00:57.408 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_name": "envoy-proxy-configmap",
        "__podman_quadlet_type": "yml",
        "__podman_rootless": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Monday 06 January 2025  13:43:26 -0500 (0:00:00.058)       0:00:57.467 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 06 January 2025  13:43:26 -0500 (0:00:00.061)       0:00:57.528 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 06 January 2025  13:43:26 -0500 (0:00:00.033)       0:00:57.562 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Debug] ********************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 06 January 2025  13:43:26 -0500 (0:00:00.079)       0:00:57.642 ******** 
ok: [managed-node3] => {}

MSG:

item {
    "file_src": "envoy-proxy-configmap.yml"
}

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:20
Monday 06 January 2025  13:43:26 -0500 (0:00:00.040)       0:00:57.682 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_group": "0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35
Monday 06 January 2025  13:43:27 -0500 (0:00:00.067)       0:00:57.750 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1736188868.5129364,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 32,
        "charset": "binary",
        "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97",
        "ctime": 1736188848.6497798,
        "dev": 51714,
        "device_type": 0,
        "executable": true,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 9125782,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "application/x-pie-executable",
        "mode": "0755",
        "mtime": 1730678400.0,
        "nlink": 1,
        "path": "/usr/bin/getsubids",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 15744,
        "uid": 0,
        "version": "4070602005",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": true,
        "xoth": true,
        "xusr": true
    }
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:46
Monday 06 January 2025  13:43:27 -0500 (0:00:00.465)       0:00:58.215 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:51
Monday 06 January 2025  13:43:27 -0500 (0:00:00.051)       0:00:58.267 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:56
Monday 06 January 2025  13:43:27 -0500 (0:00:00.035)       0:00:58.302 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:69
Monday 06 January 2025  13:43:27 -0500 (0:00:00.033)       0:00:58.336 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74
Monday 06 January 2025  13:43:27 -0500 (0:00:00.032)       0:00:58.368 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79
Monday 06 January 2025  13:43:27 -0500 (0:00:00.033)       0:00:58.402 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Monday 06 January 2025  13:43:27 -0500 (0:00:00.032)       0:00:58.434 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:96
Monday 06 January 2025  13:43:27 -0500 (0:00:00.034)       0:00:58.469 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Monday 06 January 2025  13:43:27 -0500 (0:00:00.032)       0:00:58.501 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_activate_systemd_unit": true,
        "__podman_images_found": [],
        "__podman_kube_yamls_raw": "",
        "__podman_service_name": "",
        "__podman_systemd_scope": "system",
        "__podman_user_home_dir": "/root",
        "__podman_xdg_runtime_dir": "/run/user/0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Monday 06 January 2025  13:43:27 -0500 (0:00:00.052)       0:00:58.554 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_path": "/etc/containers/systemd"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Monday 06 January 2025  13:43:27 -0500 (0:00:00.033)       0:00:58.587 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_kube_yamls_raw | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Monday 06 January 2025  13:43:27 -0500 (0:00:00.036)       0:00:58.623 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_images": [],
        "__podman_quadlet_file": "/etc/containers/systemd/envoy-proxy-configmap.yml",
        "__podman_volumes": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Monday 06 January 2025  13:43:28 -0500 (0:00:00.106)       0:00:58.729 ******** 
ok: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Monday 06 January 2025  13:43:28 -0500 (0:00:00.047)       0:00:58.777 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_state == \"absent\"",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
Monday 06 January 2025  13:43:28 -0500 (0:00:00.052)       0:00:58.829 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2
Monday 06 January 2025  13:43:28 -0500 (0:00:00.126)       0:00:58.955 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Monday 06 January 2025  13:43:28 -0500 (0:00:00.091)       0:00:59.047 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Monday 06 January 2025  13:43:28 -0500 (0:00:00.137)       0:00:59.185 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Monday 06 January 2025  13:43:28 -0500 (0:00:00.047)       0:00:59.232 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7
Monday 06 January 2025  13:43:28 -0500 (0:00:00.034)       0:00:59.267 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Monday 06 January 2025  13:43:28 -0500 (0:00:00.035)       0:00:59.303 ******** 
skipping: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Monday 06 January 2025  13:43:28 -0500 (0:00:00.044)       0:00:59.347 ******** 
ok: [managed-node3] => {
    "changed": false,
    "gid": 0,
    "group": "root",
    "mode": "0755",
    "owner": "root",
    "path": "/etc/containers/systemd",
    "secontext": "system_u:object_r:etc_t:s0",
    "size": 103,
    "state": "directory",
    "uid": 0
}

TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48
Monday 06 January 2025  13:43:29 -0500 (0:00:00.465)       0:00:59.812 ******** 
changed: [managed-node3] => {
    "changed": true,
    "checksum": "d681c7d56f912150d041873e880818b22a90c188",
    "dest": "/etc/containers/systemd/envoy-proxy-configmap.yml",
    "gid": 0,
    "group": "root",
    "md5sum": "aec75d972c231aac004e1338934544cf",
    "mode": "0644",
    "owner": "root",
    "secontext": "system_u:object_r:etc_t:s0",
    "size": 2102,
    "src": "/root/.ansible/tmp/ansible-tmp-1736189009.173875-14061-170593450852424/.source.yml",
    "state": "file",
    "uid": 0
}

TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58
Monday 06 January 2025  13:43:29 -0500 (0:00:00.854)       0:01:00.667 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_quadlet_file_src",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70
Monday 06 January 2025  13:43:30 -0500 (0:00:00.067)       0:01:00.734 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_copy_file is skipped",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82
Monday 06 January 2025  13:43:30 -0500 (0:00:00.106)       0:01:00.841 ******** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.podman : Start service] ************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
Monday 06 January 2025  13:43:30 -0500 (0:00:00.853)       0:01:01.695 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_service_name | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Restart service] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125
Monday 06 January 2025  13:43:31 -0500 (0:00:00.092)       0:01:01.787 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_service_name | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Monday 06 January 2025  13:43:31 -0500 (0:00:00.067)       0:01:01.854 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_file_src": "",
        "__podman_quadlet_spec": {},
        "__podman_quadlet_str": "---\napiVersion: v1\nkind: PersistentVolumeClaim\nmetadata:\n  name: wp-pv-claim\n  labels:\n    app: wordpress\nspec:\n  accessModes:\n  - ReadWriteOnce\n  resources:\n    requests:\n      storage: 20Gi\n---\napiVersion: v1\nkind: Pod\nmetadata:\n  name: quadlet-demo\nspec:\n  containers:\n  - name: wordpress\n    image: quay.io/linux-system-roles/wordpress:4.8-apache\n    env:\n    - name: WORDPRESS_DB_HOST\n      value: quadlet-demo-mysql\n    - name: WORDPRESS_DB_PASSWORD\n      valueFrom:\n        secretKeyRef:\n          name: mysql-root-password-kube\n          key: password\n    volumeMounts:\n    - name: wordpress-persistent-storage\n      mountPath: /var/www/html\n    resources:\n      requests:\n        memory: \"64Mi\"\n        cpu: \"250m\"\n      limits:\n        memory: \"128Mi\"\n        cpu: \"500m\"\n  - name: envoy\n    image: quay.io/linux-system-roles/envoyproxy:v1.25.0\n    volumeMounts:\n    - name: config-volume\n      mountPath: /etc/envoy\n    - name: certificates\n      mountPath: /etc/envoy-certificates\n    env:\n    - name: ENVOY_UID\n      value: \"0\"\n    resources:\n      requests:\n        memory: \"64Mi\"\n        cpu: \"250m\"\n      limits:\n        memory: \"128Mi\"\n        cpu: \"500m\"\n  volumes:\n  - name: config-volume\n    configMap:\n      name: envoy-proxy-config\n  - name: certificates\n    secret:\n      secretName: envoy-certificates\n  - name: wordpress-persistent-storage\n    persistentVolumeClaim:\n      claimName: wp-pv-claim\n  - name: www  # not used - for testing hostpath\n    hostPath:\n      path: /tmp/httpd3\n  - name: create  # not used - for testing hostpath\n    hostPath:\n      path: /tmp/httpd3-create\n",
        "__podman_quadlet_template_src": "quadlet-demo.yml.j2"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Monday 06 January 2025  13:43:31 -0500 (0:00:00.153)       0:01:02.008 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_continue_if_pull_fails": false,
        "__podman_pull_image": true,
        "__podman_state": "created",
        "__podman_systemd_unit_scope": "",
        "__podman_user": "root"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Monday 06 January 2025  13:43:31 -0500 (0:00:00.061)       0:01:02.069 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_quadlet_str",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Monday 06 January 2025  13:43:31 -0500 (0:00:00.035)       0:01:02.104 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_name": "quadlet-demo",
        "__podman_quadlet_type": "yml",
        "__podman_rootless": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Monday 06 January 2025  13:43:31 -0500 (0:00:00.053)       0:01:02.157 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 06 January 2025  13:43:31 -0500 (0:00:00.066)       0:01:02.223 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 06 January 2025  13:43:31 -0500 (0:00:00.039)       0:01:02.263 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Debug] ********************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 06 January 2025  13:43:31 -0500 (0:00:00.033)       0:01:02.297 ******** 
ok: [managed-node3] => {}

MSG:

item {
    "template_src": "quadlet-demo.yml.j2"
}

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:20
Monday 06 January 2025  13:43:31 -0500 (0:00:00.042)       0:01:02.339 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_group": "0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35
Monday 06 January 2025  13:43:31 -0500 (0:00:00.062)       0:01:02.401 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1736188868.5129364,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 32,
        "charset": "binary",
        "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97",
        "ctime": 1736188848.6497798,
        "dev": 51714,
        "device_type": 0,
        "executable": true,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 9125782,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "application/x-pie-executable",
        "mode": "0755",
        "mtime": 1730678400.0,
        "nlink": 1,
        "path": "/usr/bin/getsubids",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 15744,
        "uid": 0,
        "version": "4070602005",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": true,
        "xoth": true,
        "xusr": true
    }
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:46
Monday 06 January 2025  13:43:32 -0500 (0:00:00.529)       0:01:02.931 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:51
Monday 06 January 2025  13:43:32 -0500 (0:00:00.033)       0:01:02.965 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:56
Monday 06 January 2025  13:43:32 -0500 (0:00:00.033)       0:01:02.998 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:69
Monday 06 January 2025  13:43:32 -0500 (0:00:00.032)       0:01:03.031 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74
Monday 06 January 2025  13:43:32 -0500 (0:00:00.034)       0:01:03.065 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79
Monday 06 January 2025  13:43:32 -0500 (0:00:00.032)       0:01:03.097 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Monday 06 January 2025  13:43:32 -0500 (0:00:00.033)       0:01:03.131 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:96
Monday 06 January 2025  13:43:32 -0500 (0:00:00.032)       0:01:03.163 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Monday 06 January 2025  13:43:32 -0500 (0:00:00.038)       0:01:03.202 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_activate_systemd_unit": true,
        "__podman_images_found": [],
        "__podman_kube_yamls_raw": "",
        "__podman_service_name": "",
        "__podman_systemd_scope": "system",
        "__podman_user_home_dir": "/root",
        "__podman_xdg_runtime_dir": "/run/user/0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Monday 06 January 2025  13:43:32 -0500 (0:00:00.073)       0:01:03.275 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_path": "/etc/containers/systemd"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Monday 06 January 2025  13:43:32 -0500 (0:00:00.045)       0:01:03.321 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_kube_yamls_raw | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Monday 06 January 2025  13:43:32 -0500 (0:00:00.036)       0:01:03.357 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_images": [],
        "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.yml",
        "__podman_volumes": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Monday 06 January 2025  13:43:32 -0500 (0:00:00.116)       0:01:03.474 ******** 
ok: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Monday 06 January 2025  13:43:32 -0500 (0:00:00.041)       0:01:03.515 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_state == \"absent\"",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
Monday 06 January 2025  13:43:32 -0500 (0:00:00.031)       0:01:03.547 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2
Monday 06 January 2025  13:43:32 -0500 (0:00:00.064)       0:01:03.611 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Monday 06 January 2025  13:43:32 -0500 (0:00:00.050)       0:01:03.662 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Monday 06 January 2025  13:43:32 -0500 (0:00:00.030)       0:01:03.692 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Monday 06 January 2025  13:43:33 -0500 (0:00:00.029)       0:01:03.722 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7
Monday 06 January 2025  13:43:33 -0500 (0:00:00.072)       0:01:03.795 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Monday 06 January 2025  13:43:33 -0500 (0:00:00.032)       0:01:03.827 ******** 
skipping: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Monday 06 January 2025  13:43:33 -0500 (0:00:00.032)       0:01:03.859 ******** 
ok: [managed-node3] => {
    "changed": false,
    "gid": 0,
    "group": "root",
    "mode": "0755",
    "owner": "root",
    "path": "/etc/containers/systemd",
    "secontext": "system_u:object_r:etc_t:s0",
    "size": 136,
    "state": "directory",
    "uid": 0
}

TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48
Monday 06 January 2025  13:43:33 -0500 (0:00:00.571)       0:01:04.430 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_quadlet_file_src | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58
Monday 06 January 2025  13:43:33 -0500 (0:00:00.032)       0:01:04.463 ******** 
changed: [managed-node3] => {
    "changed": true,
    "checksum": "998dccde0483b1654327a46ddd89cbaa47650370",
    "dest": "/etc/containers/systemd/quadlet-demo.yml",
    "gid": 0,
    "group": "root",
    "md5sum": "fd890594adfc24339cb9cdc5e7b19a66",
    "mode": "0644",
    "owner": "root",
    "secontext": "system_u:object_r:etc_t:s0",
    "size": 1605,
    "src": "/root/.ansible/tmp/ansible-tmp-1736189013.8411329-14251-232144668374786/.source.yml",
    "state": "file",
    "uid": 0
}

TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70
Monday 06 January 2025  13:43:34 -0500 (0:00:00.977)       0:01:05.440 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_copy_content is skipped",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82
Monday 06 January 2025  13:43:34 -0500 (0:00:00.055)       0:01:05.496 ******** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.podman : Start service] ************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
Monday 06 January 2025  13:43:35 -0500 (0:00:00.810)       0:01:06.307 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_service_name | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Restart service] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125
Monday 06 January 2025  13:43:35 -0500 (0:00:00.036)       0:01:06.343 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_service_name | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Monday 06 January 2025  13:43:35 -0500 (0:00:00.035)       0:01:06.378 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_file_src": "quadlet-demo.kube",
        "__podman_quadlet_spec": {},
        "__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Unit]\nRequires=quadlet-demo-mysql.service\nAfter=quadlet-demo-mysql.service\n\n[Kube]\n# Point to the yaml file in the same directory\nYaml=quadlet-demo.yml\n# Use the quadlet-demo network\nNetwork=quadlet-demo.network\n# Publish the envoy proxy data port\nPublishPort=8000:8080\n# Publish the envoy proxy admin port\nPublishPort=9000:9901\n# Use the envoy proxy config map in the same directory\nConfigMap=envoy-proxy-configmap.yml",
        "__podman_quadlet_template_src": ""
    },
    "changed": false
}
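
For readability, the escaped __podman_quadlet_str value above is the verbatim content of quadlet-demo.kube; rendered as a unit file it reads as follows (no content added, only the escaping removed). This is the unit that ties the pieces together: it runs the Kubernetes YAML deployed in the previous tasks and references the network and ConfigMap files handled earlier.

    [Install]
    WantedBy=default.target

    [Unit]
    Requires=quadlet-demo-mysql.service
    After=quadlet-demo-mysql.service

    [Kube]
    # Point to the yaml file in the same directory
    Yaml=quadlet-demo.yml
    # Use the quadlet-demo network
    Network=quadlet-demo.network
    # Publish the envoy proxy data port
    PublishPort=8000:8080
    # Publish the envoy proxy admin port
    PublishPort=9000:9901
    # Use the envoy proxy config map in the same directory
    ConfigMap=envoy-proxy-configmap.yml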

TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Monday 06 January 2025  13:43:35 -0500 (0:00:00.045)       0:01:06.423 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_continue_if_pull_fails": false,
        "__podman_pull_image": true,
        "__podman_state": "created",
        "__podman_systemd_unit_scope": "",
        "__podman_user": "root"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Monday 06 January 2025  13:43:35 -0500 (0:00:00.045)       0:01:06.468 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_quadlet_file_src",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Monday 06 January 2025  13:43:35 -0500 (0:00:00.046)       0:01:06.515 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_name": "quadlet-demo",
        "__podman_quadlet_type": "kube",
        "__podman_rootless": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Monday 06 January 2025  13:43:35 -0500 (0:00:00.116)       0:01:06.631 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 06 January 2025  13:43:36 -0500 (0:00:00.159)       0:01:06.791 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 06 January 2025  13:43:36 -0500 (0:00:00.082)       0:01:06.874 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Debug] ********************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 06 January 2025  13:43:36 -0500 (0:00:00.066)       0:01:06.940 ******** 
ok: [managed-node3] => {}

MSG:

item {
    "file_src": "quadlet-demo.kube"
}

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:20
Monday 06 January 2025  13:43:36 -0500 (0:00:00.088)       0:01:07.029 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_group": "0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35
Monday 06 January 2025  13:43:36 -0500 (0:00:00.131)       0:01:07.161 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1736188868.5129364,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 32,
        "charset": "binary",
        "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97",
        "ctime": 1736188848.6497798,
        "dev": 51714,
        "device_type": 0,
        "executable": true,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 9125782,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "application/x-pie-executable",
        "mode": "0755",
        "mtime": 1730678400.0,
        "nlink": 1,
        "path": "/usr/bin/getsubids",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 15744,
        "uid": 0,
        "version": "4070602005",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": true,
        "xoth": true,
        "xusr": true
    }
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:46
Monday 06 January 2025  13:43:36 -0500 (0:00:00.504)       0:01:07.665 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:51
Monday 06 January 2025  13:43:37 -0500 (0:00:00.069)       0:01:07.734 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}
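
Note: both getsubids checks are skipped here because __podman_user is "root"; the role only needs subuid/subgid ranges for rootless users. For a rootless run, a rough manual equivalent on the managed node would be (the user name below is just an example):

    getsubids someuser       # print the subuid ranges allocated to 'someuser'
    getsubids -g someuser    # print the subgid ranges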

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:56
Monday 06 January 2025  13:43:37 -0500 (0:00:00.191)       0:01:07.926 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:69
Monday 06 January 2025  13:43:37 -0500 (0:00:00.052)       0:01:07.979 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74
Monday 06 January 2025  13:43:37 -0500 (0:00:00.053)       0:01:08.032 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79
Monday 06 January 2025  13:43:37 -0500 (0:00:00.059)       0:01:08.092 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Monday 06 January 2025  13:43:37 -0500 (0:00:00.061)       0:01:08.153 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:96
Monday 06 January 2025  13:43:37 -0500 (0:00:00.067)       0:01:08.221 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Monday 06 January 2025  13:43:37 -0500 (0:00:00.059)       0:01:08.280 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_activate_systemd_unit": true,
        "__podman_images_found": [],
        "__podman_kube_yamls_raw": [
            "quadlet-demo.yml"
        ],
        "__podman_service_name": "quadlet-demo.service",
        "__podman_systemd_scope": "system",
        "__podman_user_home_dir": "/root",
        "__podman_xdg_runtime_dir": "/run/user/0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Monday 06 January 2025  13:43:37 -0500 (0:00:00.107)       0:01:08.388 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_path": "/etc/containers/systemd"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Monday 06 January 2025  13:43:37 -0500 (0:00:00.062)       0:01:08.450 ******** 
ok: [managed-node3] => {
    "changed": false,
    "content": "LS0tCmFwaVZlcnNpb246IHYxCmtpbmQ6IFBlcnNpc3RlbnRWb2x1bWVDbGFpbQptZXRhZGF0YToKICBuYW1lOiB3cC1wdi1jbGFpbQogIGxhYmVsczoKICAgIGFwcDogd29yZHByZXNzCnNwZWM6CiAgYWNjZXNzTW9kZXM6CiAgLSBSZWFkV3JpdGVPbmNlCiAgcmVzb3VyY2VzOgogICAgcmVxdWVzdHM6CiAgICAgIHN0b3JhZ2U6IDIwR2kKLS0tCmFwaVZlcnNpb246IHYxCmtpbmQ6IFBvZAptZXRhZGF0YToKICBuYW1lOiBxdWFkbGV0LWRlbW8Kc3BlYzoKICBjb250YWluZXJzOgogIC0gbmFtZTogd29yZHByZXNzCiAgICBpbWFnZTogcXVheS5pby9saW51eC1zeXN0ZW0tcm9sZXMvd29yZHByZXNzOjQuOC1hcGFjaGUKICAgIGVudjoKICAgIC0gbmFtZTogV09SRFBSRVNTX0RCX0hPU1QKICAgICAgdmFsdWU6IHF1YWRsZXQtZGVtby1teXNxbAogICAgLSBuYW1lOiBXT1JEUFJFU1NfREJfUEFTU1dPUkQKICAgICAgdmFsdWVGcm9tOgogICAgICAgIHNlY3JldEtleVJlZjoKICAgICAgICAgIG5hbWU6IG15c3FsLXJvb3QtcGFzc3dvcmQta3ViZQogICAgICAgICAga2V5OiBwYXNzd29yZAogICAgdm9sdW1lTW91bnRzOgogICAgLSBuYW1lOiB3b3JkcHJlc3MtcGVyc2lzdGVudC1zdG9yYWdlCiAgICAgIG1vdW50UGF0aDogL3Zhci93d3cvaHRtbAogICAgcmVzb3VyY2VzOgogICAgICByZXF1ZXN0czoKICAgICAgICBtZW1vcnk6ICI2NE1pIgogICAgICAgIGNwdTogIjI1MG0iCiAgICAgIGxpbWl0czoKICAgICAgICBtZW1vcnk6ICIxMjhNaSIKICAgICAgICBjcHU6ICI1MDBtIgogIC0gbmFtZTogZW52b3kKICAgIGltYWdlOiBxdWF5LmlvL2xpbnV4LXN5c3RlbS1yb2xlcy9lbnZveXByb3h5OnYxLjI1LjAKICAgIHZvbHVtZU1vdW50czoKICAgIC0gbmFtZTogY29uZmlnLXZvbHVtZQogICAgICBtb3VudFBhdGg6IC9ldGMvZW52b3kKICAgIC0gbmFtZTogY2VydGlmaWNhdGVzCiAgICAgIG1vdW50UGF0aDogL2V0Yy9lbnZveS1jZXJ0aWZpY2F0ZXMKICAgIGVudjoKICAgIC0gbmFtZTogRU5WT1lfVUlECiAgICAgIHZhbHVlOiAiMCIKICAgIHJlc291cmNlczoKICAgICAgcmVxdWVzdHM6CiAgICAgICAgbWVtb3J5OiAiNjRNaSIKICAgICAgICBjcHU6ICIyNTBtIgogICAgICBsaW1pdHM6CiAgICAgICAgbWVtb3J5OiAiMTI4TWkiCiAgICAgICAgY3B1OiAiNTAwbSIKICB2b2x1bWVzOgogIC0gbmFtZTogY29uZmlnLXZvbHVtZQogICAgY29uZmlnTWFwOgogICAgICBuYW1lOiBlbnZveS1wcm94eS1jb25maWcKICAtIG5hbWU6IGNlcnRpZmljYXRlcwogICAgc2VjcmV0OgogICAgICBzZWNyZXROYW1lOiBlbnZveS1jZXJ0aWZpY2F0ZXMKICAtIG5hbWU6IHdvcmRwcmVzcy1wZXJzaXN0ZW50LXN0b3JhZ2UKICAgIHBlcnNpc3RlbnRWb2x1bWVDbGFpbToKICAgICAgY2xhaW1OYW1lOiB3cC1wdi1jbGFpbQogIC0gbmFtZTogd3d3ICAjIG5vdCB1c2VkIC0gZm9yIHRlc3RpbmcgaG9zdHBhdGgKICAgIGhvc3RQYXRoOgogICAgICBwYXRoOiAvdG1wL2h0dHBkMwogIC0gbmFtZTogY3JlYXRlICAjIG5vdCB1c2VkIC0gZm9yIHRlc3RpbmcgaG9zdHBhdGgKICAgIGhvc3RQYXRoOgogICAgICBwYXRoOiAvdG1wL2h0dHBkMy1jcmVhdGUK",
    "encoding": "base64",
    "source": "/etc/containers/systemd/quadlet-demo.yml"
}
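
Note: the content field above is the base64-encoded /etc/containers/systemd/quadlet-demo.yml (the kube YAML referenced by Yaml= in the .kube unit). When inspecting a saved log, the payload can be restored with something like the following, pasting the content value in place of the placeholder:

    echo '<content-value-from-above>' | base64 -d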

TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Monday 06 January 2025  13:43:38 -0500 (0:00:00.403)       0:01:08.853 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_images": [
            "quay.io/linux-system-roles/wordpress:4.8-apache",
            "quay.io/linux-system-roles/envoyproxy:v1.25.0"
        ],
        "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.kube",
        "__podman_volumes": [
            "/tmp/httpd3",
            "/tmp/httpd3-create"
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Monday 06 January 2025  13:43:38 -0500 (0:00:00.169)       0:01:09.023 ******** 
ok: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Monday 06 January 2025  13:43:38 -0500 (0:00:00.061)       0:01:09.084 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_state == \"absent\"",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
Monday 06 January 2025  13:43:38 -0500 (0:00:00.053)       0:01:09.138 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2
Monday 06 January 2025  13:43:38 -0500 (0:00:00.113)       0:01:09.251 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Monday 06 January 2025  13:43:38 -0500 (0:00:00.084)       0:01:09.336 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Monday 06 January 2025  13:43:38 -0500 (0:00:00.049)       0:01:09.385 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Monday 06 January 2025  13:43:38 -0500 (0:00:00.046)       0:01:09.432 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7
Monday 06 January 2025  13:43:38 -0500 (0:00:00.048)       0:01:09.480 ******** 
changed: [managed-node3] => (item=/tmp/httpd3) => {
    "ansible_loop_var": "item",
    "changed": true,
    "gid": 0,
    "group": "root",
    "item": "/tmp/httpd3",
    "mode": "0755",
    "owner": "root",
    "path": "/tmp/httpd3",
    "secontext": "unconfined_u:object_r:user_tmp_t:s0",
    "size": 6,
    "state": "directory",
    "uid": 0
}
changed: [managed-node3] => (item=/tmp/httpd3-create) => {
    "ansible_loop_var": "item",
    "changed": true,
    "gid": 0,
    "group": "root",
    "item": "/tmp/httpd3-create",
    "mode": "0755",
    "owner": "root",
    "path": "/tmp/httpd3-create",
    "secontext": "unconfined_u:object_r:user_tmp_t:s0",
    "size": 6,
    "state": "directory",
    "uid": 0
}

TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Monday 06 January 2025  13:43:39 -0500 (0:00:00.825)       0:01:10.306 ******** 
changed: [managed-node3] => (item=None) => {
    "attempts": 1,
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": true
}
changed: [managed-node3] => (item=None) => {
    "attempts": 1,
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": true
}
changed: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": true
}
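
Note: the per-image results are hidden by no_log, but this task pulls the two images listed in __podman_images above. If needed, their presence could be confirmed on the managed node with something like:

    podman image exists quay.io/linux-system-roles/wordpress:4.8-apache && echo wordpress present
    podman image exists quay.io/linux-system-roles/envoyproxy:v1.25.0 && echo envoy present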

TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Monday 06 January 2025  13:43:57 -0500 (0:00:17.872)       0:01:28.178 ******** 
ok: [managed-node3] => {
    "changed": false,
    "gid": 0,
    "group": "root",
    "mode": "0755",
    "owner": "root",
    "path": "/etc/containers/systemd",
    "secontext": "system_u:object_r:etc_t:s0",
    "size": 160,
    "state": "directory",
    "uid": 0
}

TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48
Monday 06 January 2025  13:43:57 -0500 (0:00:00.462)       0:01:28.641 ******** 
changed: [managed-node3] => {
    "changed": true,
    "checksum": "7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7",
    "dest": "/etc/containers/systemd/quadlet-demo.kube",
    "gid": 0,
    "group": "root",
    "md5sum": "da53c88f92b68b0487aa209f795b6bb3",
    "mode": "0644",
    "owner": "root",
    "secontext": "system_u:object_r:etc_t:s0",
    "size": 456,
    "src": "/root/.ansible/tmp/ansible-tmp-1736189037.9919684-15082-277151472698982/.source.kube",
    "state": "file",
    "uid": 0
}
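
Note: the checksum reported by the copy module is a SHA-1 digest, so a spot check on the managed node could compare it directly, e.g.:

    sha1sum /etc/containers/systemd/quadlet-demo.kube   # expect 7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7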

TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58
Monday 06 January 2025  13:43:58 -0500 (0:00:00.700)       0:01:29.341 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_quadlet_file_src",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70
Monday 06 January 2025  13:43:58 -0500 (0:00:00.033)       0:01:29.375 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_copy_file is skipped",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82
Monday 06 January 2025  13:43:58 -0500 (0:00:00.030)       0:01:29.405 ******** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}
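
Note: this task is a plain daemon reload (the systemd module is called with no unit name), which lets the podman quadlet generator re-run and turn the freshly copied quadlet-demo.kube into quadlet-demo.service. The manual equivalent would be roughly:

    systemctl daemon-reload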

TASK [fedora.linux_system_roles.podman : Start service] ************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
Monday 06 January 2025  13:43:59 -0500 (0:00:00.830)       0:01:30.236 ******** 
changed: [managed-node3] => {
    "changed": true,
    "name": "quadlet-demo.service",
    "state": "started",
    "status": {
        "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0",
        "ActiveEnterTimestampMonotonic": "0",
        "ActiveExitTimestampMonotonic": "0",
        "ActiveState": "inactive",
        "After": "network-online.target quadlet-demo-network.service sysinit.target -.mount quadlet-demo-mysql.service system.slice basic.target systemd-journald.socket",
        "AllowIsolate": "no",
        "AssertResult": "no",
        "AssertTimestampMonotonic": "0",
        "Before": "shutdown.target multi-user.target",
        "BlockIOAccounting": "no",
        "BlockIOWeight": "[not set]",
        "CPUAccounting": "yes",
        "CPUAffinityFromNUMA": "no",
        "CPUQuotaPerSecUSec": "infinity",
        "CPUQuotaPeriodUSec": "infinity",
        "CPUSchedulingPolicy": "0",
        "CPUSchedulingPriority": "0",
        "CPUSchedulingResetOnFork": "no",
        "CPUShares": "[not set]",
        "CPUUsageNSec": "[not set]",
        "CPUWeight": "[not set]",
        "CacheDirectoryMode": "0755",
        "CanFreeze": "yes",
        "CanIsolate": "no",
        "CanReload": "no",
        "CanStart": "yes",
        "CanStop": "yes",
        "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
        "CleanResult": "success",
        "CollectMode": "inactive",
        "ConditionResult": "no",
        "ConditionTimestampMonotonic": "0",
        "ConfigurationDirectoryMode": "0755",
        "Conflicts": "shutdown.target",
        "ControlGroupId": "0",
        "ControlPID": "0",
        "CoredumpFilter": "0x33",
        "CoredumpReceive": "no",
        "DefaultDependencies": "yes",
        "DefaultMemoryLow": "0",
        "DefaultMemoryMin": "0",
        "DefaultStartupMemoryLow": "0",
        "Delegate": "no",
        "Description": "quadlet-demo.service",
        "DevicePolicy": "auto",
        "DynamicUser": "no",
        "EffectiveMemoryHigh": "3698229248",
        "EffectiveMemoryMax": "3698229248",
        "EffectiveTasksMax": "22349",
        "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo.service",
        "ExecMainCode": "0",
        "ExecMainExitTimestampMonotonic": "0",
        "ExecMainHandoffTimestampMonotonic": "0",
        "ExecMainPID": "0",
        "ExecMainStartTimestampMonotonic": "0",
        "ExecMainStatus": "0",
        "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExitType": "main",
        "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "FailureAction": "none",
        "FileDescriptorStoreMax": "0",
        "FileDescriptorStorePreserve": "restart",
        "FinalKillSignal": "9",
        "FragmentPath": "/run/systemd/generator/quadlet-demo.service",
        "FreezerState": "running",
        "GID": "[not set]",
        "GuessMainPID": "yes",
        "IOAccounting": "no",
        "IOReadBytes": "[not set]",
        "IOReadOperations": "[not set]",
        "IOSchedulingClass": "2",
        "IOSchedulingPriority": "4",
        "IOWeight": "[not set]",
        "IOWriteBytes": "[not set]",
        "IOWriteOperations": "[not set]",
        "IPAccounting": "no",
        "IPEgressBytes": "[no data]",
        "IPEgressPackets": "[no data]",
        "IPIngressBytes": "[no data]",
        "IPIngressPackets": "[no data]",
        "Id": "quadlet-demo.service",
        "IgnoreOnIsolate": "no",
        "IgnoreSIGPIPE": "yes",
        "InactiveEnterTimestampMonotonic": "0",
        "InactiveExitTimestampMonotonic": "0",
        "JobRunningTimeoutUSec": "infinity",
        "JobTimeoutAction": "none",
        "JobTimeoutUSec": "infinity",
        "KeyringMode": "private",
        "KillMode": "mixed",
        "KillSignal": "15",
        "LimitAS": "infinity",
        "LimitASSoft": "infinity",
        "LimitCORE": "infinity",
        "LimitCORESoft": "infinity",
        "LimitCPU": "infinity",
        "LimitCPUSoft": "infinity",
        "LimitDATA": "infinity",
        "LimitDATASoft": "infinity",
        "LimitFSIZE": "infinity",
        "LimitFSIZESoft": "infinity",
        "LimitLOCKS": "infinity",
        "LimitLOCKSSoft": "infinity",
        "LimitMEMLOCK": "8388608",
        "LimitMEMLOCKSoft": "8388608",
        "LimitMSGQUEUE": "819200",
        "LimitMSGQUEUESoft": "819200",
        "LimitNICE": "0",
        "LimitNICESoft": "0",
        "LimitNOFILE": "524288",
        "LimitNOFILESoft": "1024",
        "LimitNPROC": "13968",
        "LimitNPROCSoft": "13968",
        "LimitRSS": "infinity",
        "LimitRSSSoft": "infinity",
        "LimitRTPRIO": "0",
        "LimitRTPRIOSoft": "0",
        "LimitRTTIME": "infinity",
        "LimitRTTIMESoft": "infinity",
        "LimitSIGPENDING": "13968",
        "LimitSIGPENDINGSoft": "13968",
        "LimitSTACK": "infinity",
        "LimitSTACKSoft": "8388608",
        "LoadState": "loaded",
        "LockPersonality": "no",
        "LogLevelMax": "-1",
        "LogRateLimitBurst": "0",
        "LogRateLimitIntervalUSec": "0",
        "LogsDirectoryMode": "0755",
        "MainPID": "0",
        "ManagedOOMMemoryPressure": "auto",
        "ManagedOOMMemoryPressureLimit": "0",
        "ManagedOOMPreference": "none",
        "ManagedOOMSwap": "auto",
        "MemoryAccounting": "yes",
        "MemoryAvailable": "2578788352",
        "MemoryCurrent": "[not set]",
        "MemoryDenyWriteExecute": "no",
        "MemoryHigh": "infinity",
        "MemoryKSM": "no",
        "MemoryLimit": "infinity",
        "MemoryLow": "0",
        "MemoryMax": "infinity",
        "MemoryMin": "0",
        "MemoryPeak": "[not set]",
        "MemoryPressureThresholdUSec": "200ms",
        "MemoryPressureWatch": "auto",
        "MemorySwapCurrent": "[not set]",
        "MemorySwapMax": "infinity",
        "MemorySwapPeak": "[not set]",
        "MemoryZSwapCurrent": "[not set]",
        "MemoryZSwapMax": "infinity",
        "MemoryZSwapWriteback": "yes",
        "MountAPIVFS": "no",
        "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "NFileDescriptorStore": "0",
        "NRestarts": "0",
        "NUMAPolicy": "n/a",
        "Names": "quadlet-demo.service",
        "NeedDaemonReload": "no",
        "Nice": "0",
        "NoNewPrivileges": "no",
        "NonBlocking": "no",
        "NotifyAccess": "all",
        "OOMPolicy": "stop",
        "OOMScoreAdjust": "0",
        "OnFailureJobMode": "replace",
        "OnSuccessJobMode": "fail",
        "Perpetual": "no",
        "PrivateDevices": "no",
        "PrivateIPC": "no",
        "PrivateMounts": "no",
        "PrivateNetwork": "no",
        "PrivateTmp": "no",
        "PrivateUsers": "no",
        "ProcSubset": "all",
        "ProtectClock": "no",
        "ProtectControlGroups": "no",
        "ProtectHome": "no",
        "ProtectHostname": "no",
        "ProtectKernelLogs": "no",
        "ProtectKernelModules": "no",
        "ProtectKernelTunables": "no",
        "ProtectProc": "default",
        "ProtectSystem": "no",
        "RefuseManualStart": "no",
        "RefuseManualStop": "no",
        "ReloadResult": "success",
        "ReloadSignal": "1",
        "RemainAfterExit": "no",
        "RemoveIPC": "no",
        "Requires": "-.mount system.slice quadlet-demo-mysql.service quadlet-demo-network.service sysinit.target",
        "RequiresMountsFor": "/run/containers",
        "Restart": "no",
        "RestartKillSignal": "15",
        "RestartMaxDelayUSec": "infinity",
        "RestartMode": "normal",
        "RestartSteps": "0",
        "RestartUSec": "100ms",
        "RestartUSecNext": "100ms",
        "RestrictNamespaces": "no",
        "RestrictRealtime": "no",
        "RestrictSUIDSGID": "no",
        "Result": "success",
        "RootDirectoryStartOnly": "no",
        "RootEphemeral": "no",
        "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "RuntimeDirectoryMode": "0755",
        "RuntimeDirectoryPreserve": "no",
        "RuntimeMaxUSec": "infinity",
        "RuntimeRandomizedExtraUSec": "0",
        "SameProcessGroup": "no",
        "SecureBits": "0",
        "SendSIGHUP": "no",
        "SendSIGKILL": "yes",
        "SetLoginEnvironment": "no",
        "Slice": "system.slice",
        "SourcePath": "/etc/containers/systemd/quadlet-demo.kube",
        "StandardError": "inherit",
        "StandardInput": "null",
        "StandardOutput": "journal",
        "StartLimitAction": "none",
        "StartLimitBurst": "5",
        "StartLimitIntervalUSec": "10s",
        "StartupBlockIOWeight": "[not set]",
        "StartupCPUShares": "[not set]",
        "StartupCPUWeight": "[not set]",
        "StartupIOWeight": "[not set]",
        "StartupMemoryHigh": "infinity",
        "StartupMemoryLow": "0",
        "StartupMemoryMax": "infinity",
        "StartupMemorySwapMax": "infinity",
        "StartupMemoryZSwapMax": "infinity",
        "StateChangeTimestampMonotonic": "0",
        "StateDirectoryMode": "0755",
        "StatusErrno": "0",
        "StopWhenUnneeded": "no",
        "SubState": "dead",
        "SuccessAction": "none",
        "SurviveFinalKillSignal": "no",
        "SyslogFacility": "3",
        "SyslogIdentifier": "quadlet-demo",
        "SyslogLevel": "6",
        "SyslogLevelPrefix": "yes",
        "SyslogPriority": "30",
        "SystemCallErrorNumber": "2147483646",
        "TTYReset": "no",
        "TTYVHangup": "no",
        "TTYVTDisallocate": "no",
        "TasksAccounting": "yes",
        "TasksCurrent": "[not set]",
        "TasksMax": "22349",
        "TimeoutAbortUSec": "1min 30s",
        "TimeoutCleanUSec": "infinity",
        "TimeoutStartFailureMode": "terminate",
        "TimeoutStartUSec": "1min 30s",
        "TimeoutStopFailureMode": "terminate",
        "TimeoutStopUSec": "1min 30s",
        "TimerSlackNSec": "50000",
        "Transient": "no",
        "Type": "notify",
        "UID": "[not set]",
        "UMask": "0022",
        "UnitFilePreset": "disabled",
        "UnitFileState": "generated",
        "UtmpMode": "init",
        "WantedBy": "multi-user.target",
        "Wants": "network-online.target",
        "WatchdogSignal": "6",
        "WatchdogTimestampMonotonic": "0",
        "WatchdogUSec": "infinity"
    }
}
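
Note: per the properties above (FragmentPath under /run/systemd/generator, SourcePath=/etc/containers/systemd/quadlet-demo.kube, UnitFileState=generated), quadlet-demo.service is not a hand-written unit but one produced by the quadlet generator. To inspect it after the fact, something like this would work:

    systemctl cat quadlet-demo.service              # show the generated unit file
    systemctl status quadlet-demo.service --no-pager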

TASK [fedora.linux_system_roles.podman : Restart service] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125
Monday 06 January 2025  13:44:01 -0500 (0:00:01.538)       0:01:31.775 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_service_started is changed",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Cancel linger] ************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196
Monday 06 January 2025  13:44:01 -0500 (0:00:00.054)       0:01:31.829 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.podman : Handle credential files - absent] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202
Monday 06 January 2025  13:44:01 -0500 (0:00:00.047)       0:01:31.877 ******** 
skipping: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211
Monday 06 January 2025  13:44:01 -0500 (0:00:00.048)       0:01:31.925 ******** 
skipping: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [Check quadlet files] *****************************************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:96
Monday 06 January 2025  13:44:01 -0500 (0:00:00.078)       0:01:32.003 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "ls",
        "-alrtF",
        "/etc/containers/systemd"
    ],
    "delta": "0:00:00.004513",
    "end": "2025-01-06 13:44:01.695378",
    "rc": 0,
    "start": "2025-01-06 13:44:01.690865"
}

STDOUT:

total 24
drwxr-xr-x. 9 root root  175 Jan  6 13:42 ../
-rw-r--r--. 1 root root   74 Jan  6 13:43 quadlet-demo.network
-rw-r--r--. 1 root root    9 Jan  6 13:43 quadlet-demo-mysql.volume
-rw-r--r--. 1 root root  363 Jan  6 13:43 quadlet-demo-mysql.container
-rw-r--r--. 1 root root 2102 Jan  6 13:43 envoy-proxy-configmap.yml
-rw-r--r--. 1 root root 1605 Jan  6 13:43 quadlet-demo.yml
-rw-r--r--. 1 root root  456 Jan  6 13:43 quadlet-demo.kube
drwxr-xr-x. 2 root root  185 Jan  6 13:43 ./

TASK [Check containers] ********************************************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:100
Monday 06 January 2025  13:44:01 -0500 (0:00:00.504)       0:01:32.508 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "podman",
        "ps",
        "-a"
    ],
    "delta": "0:00:00.059630",
    "end": "2025-01-06 13:44:02.318605",
    "failed_when_result": false,
    "rc": 0,
    "start": "2025-01-06 13:44:02.258975"
}

STDOUT:

CONTAINER ID  IMAGE                                            COMMAND               CREATED         STATUS                   PORTS                                                      NAMES
a9d3cc1040b1  quay.io/linux-system-roles/mysql:5.6             mysqld                36 seconds ago  Up 36 seconds (healthy)  3306/tcp                                                   quadlet-demo-mysql
5b2c81a505bb  localhost/podman-pause:5.3.1-1733097600                                1 second ago    Up 2 seconds                                                                        a96f3a51b8d1-service
48c70ece6166  localhost/podman-pause:5.3.1-1733097600                                1 second ago    Up 2 seconds             0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp             36d3302583a7-infra
9c5cf0b2b75a  quay.io/linux-system-roles/wordpress:4.8-apache  apache2-foregroun...  1 second ago    Up 2 seconds             0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp, 80/tcp     quadlet-demo-wordpress
b8925aeec984  quay.io/linux-system-roles/envoyproxy:v1.25.0    envoy -c /etc/env...  1 second ago    Up 2 seconds             0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp, 10000/tcp  quadlet-demo-envoy

TASK [Check volumes] ***********************************************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:105
Monday 06 January 2025  13:44:02 -0500 (0:00:00.580)       0:01:33.088 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "podman",
        "volume",
        "ls"
    ],
    "delta": "0:00:00.028825",
    "end": "2025-01-06 13:44:02.762427",
    "failed_when_result": false,
    "rc": 0,
    "start": "2025-01-06 13:44:02.733602"
}

STDOUT:

DRIVER      VOLUME NAME
local       systemd-quadlet-demo-mysql
local       wp-pv-claim
local       envoy-proxy-config
local       envoy-certificates

TASK [Check pods] **************************************************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:110
Monday 06 January 2025  13:44:02 -0500 (0:00:00.463)       0:01:33.551 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "podman",
        "pod",
        "ps",
        "--ctr-ids",
        "--ctr-names",
        "--ctr-status"
    ],
    "delta": "0:00:00.033987",
    "end": "2025-01-06 13:44:03.279742",
    "failed_when_result": false,
    "rc": 0,
    "start": "2025-01-06 13:44:03.245755"
}

STDOUT:

POD ID        NAME          STATUS      CREATED        INFRA ID      IDS                                     NAMES                                                         STATUS
36d3302583a7  quadlet-demo  Running     2 seconds ago  48c70ece6166  48c70ece6166,9c5cf0b2b75a,b8925aeec984  36d3302583a7-infra,quadlet-demo-wordpress,quadlet-demo-envoy  running,running,running

TASK [Check systemd] ***********************************************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:115
Monday 06 January 2025  13:44:03 -0500 (0:00:00.493)       0:01:34.044 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": "set -euo pipefail; systemctl list-units | grep quadlet",
    "delta": "0:00:00.012950",
    "end": "2025-01-06 13:44:03.681225",
    "failed_when_result": false,
    "rc": 0,
    "start": "2025-01-06 13:44:03.668275"
}

STDOUT:

  quadlet-demo-mysql-volume.service                                                                                                    loaded active exited    quadlet-demo-mysql-volume.service
  quadlet-demo-mysql.service                                                                                                           loaded active running   quadlet-demo-mysql.service
  quadlet-demo-network.service                                                                                                         loaded active exited    quadlet-demo-network.service
  quadlet-demo.service                                                                                                                 loaded active running   quadlet-demo.service

TASK [Check web] ***************************************************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:121
Monday 06 January 2025  13:44:03 -0500 (0:00:00.400)       0:01:34.445 ******** 
FAILED - RETRYING: [managed-node3]: Check web (6 retries left).
FAILED - RETRYING: [managed-node3]: Check web (5 retries left).
FAILED - RETRYING: [managed-node3]: Check web (4 retries left).
FAILED - RETRYING: [managed-node3]: Check web (3 retries left).
FAILED - RETRYING: [managed-node3]: Check web (2 retries left).
FAILED - RETRYING: [managed-node3]: Check web (1 retries left).
fatal: [managed-node3]: FAILED! => {
    "attempts": 6,
    "changed": false,
    "dest": "/run/out",
    "elapsed": 0,
    "url": "https://localhost:8000"
}

MSG:

Request failed: <urlopen error [Errno 113] No route to host>
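
Note: errno 113 (EHOSTUNREACH) on https://localhost:8000 means the connection attempt got a host-unreachable style error, which often points at a firewall or routing rule rather than the containers themselves, since the envoy and wordpress containers show as Up in the podman ps output above. A few commands that could help narrow this down on the managed node:

    curl -vk https://localhost:8000     # reproduce the request with verbose connection/TLS detail
    podman port quadlet-demo-envoy      # confirm the 8000->8080 and 9000->9901 publishes
    firewall-cmd --list-all             # check for rules rejecting the forwarded ports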

TASK [Dump journal] ************************************************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:142
Monday 06 January 2025  13:44:36 -0500 (0:00:33.077)       0:02:07.522 ******** 
fatal: [managed-node3]: FAILED! => {
    "changed": false,
    "cmd": [
        "journalctl",
        "-ex"
    ],
    "delta": "0:00:00.027306",
    "end": "2025-01-06 13:44:37.171340",
    "failed_when_result": true,
    "rc": 0,
    "start": "2025-01-06 13:44:37.144034"
}

STDOUT:

Jan 06 13:36:19 localhost irqbalance[642]: IRQ 56 affinity is now unmanaged
Jan 06 13:36:19 localhost irqbalance[642]: Cannot change IRQ 57 affinity: Permission denied
Jan 06 13:36:19 localhost irqbalance[642]: IRQ 57 affinity is now unmanaged
Jan 06 13:36:19 localhost irqbalance[642]: Cannot change IRQ 58 affinity: Permission denied
Jan 06 13:36:19 localhost irqbalance[642]: IRQ 58 affinity is now unmanaged
Jan 06 13:36:19 localhost irqbalance[642]: Cannot change IRQ 59 affinity: Permission denied
Jan 06 13:36:19 localhost irqbalance[642]: IRQ 59 affinity is now unmanaged
Jan 06 13:36:20 localhost cloud-init[676]: 2025-01-06 18:36:20,032 - url_helper.py[WARNING]: Exception(s) [UrlError("HTTPConnectionPool(host='169.254.169.254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff74000bc20>: Failed to establish a new connection: [Errno 101] Network is unreachable'))"), UrlError("HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff740030530>: Failed to establish a new connection: [Errno 101] Network is unreachable'))")] during request to http://[fd00:ec2::254]/latest/api/token, raising last exception
Jan 06 13:36:20 localhost cloud-init[676]: 2025-01-06 18:36:20,032 - url_helper.py[WARNING]: Calling 'http://[fd00:ec2::254]/latest/api/token' failed [5/120s]: request error [HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff740030530>: Failed to establish a new connection: [Errno 101] Network is unreachable'))]
Jan 06 13:36:22 localhost cloud-init[676]: 2025-01-06 18:36:22,184 - url_helper.py[WARNING]: Exception(s) [UrlError("HTTPConnectionPool(host='169.254.169.254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff740031880>: Failed to establish a new connection: [Errno 101] Network is unreachable'))"), UrlError("HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff7400322d0>: Failed to establish a new connection: [Errno 101] Network is unreachable'))")] during request to http://[fd00:ec2::254]/latest/api/token, raising last exception
Jan 06 13:36:22 localhost cloud-init[676]: 2025-01-06 18:36:22,184 - url_helper.py[WARNING]: Calling 'http://[fd00:ec2::254]/latest/api/token' failed [8/120s]: request error [HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff7400322d0>: Failed to establish a new connection: [Errno 101] Network is unreachable'))]
Jan 06 13:36:24 localhost cloud-init[676]: 2025-01-06 18:36:24,336 - url_helper.py[WARNING]: Exception(s) [UrlError("HTTPConnectionPool(host='169.254.169.254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff7400332f0>: Failed to establish a new connection: [Errno 101] Network is unreachable'))"), UrlError("HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff740033d40>: Failed to establish a new connection: [Errno 101] Network is unreachable'))")] during request to http://[fd00:ec2::254]/latest/api/token, raising last exception
Jan 06 13:36:24 localhost cloud-init[676]: 2025-01-06 18:36:24,336 - url_helper.py[WARNING]: Calling 'http://[fd00:ec2::254]/latest/api/token' failed [10/120s]: request error [HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff740033d40>: Failed to establish a new connection: [Errno 101] Network is unreachable'))]
Jan 06 13:36:26 localhost cloud-init[676]: 2025-01-06 18:36:26,488 - url_helper.py[WARNING]: Exception(s) [UrlError("HTTPConnectionPool(host='169.254.169.254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff740033890>: Failed to establish a new connection: [Errno 101] Network is unreachable'))"), UrlError("HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff740033680>: Failed to establish a new connection: [Errno 101] Network is unreachable'))")] during request to http://[fd00:ec2::254]/latest/api/token, raising last exception
Jan 06 13:36:26 localhost cloud-init[676]: 2025-01-06 18:36:26,489 - url_helper.py[WARNING]: Calling 'http://[fd00:ec2::254]/latest/api/token' failed [12/120s]: request error [HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff740033680>: Failed to establish a new connection: [Errno 101] Network is unreachable'))]
Jan 06 13:36:28 localhost cloud-init[676]: 2025-01-06 18:36:28,641 - url_helper.py[WARNING]: Exception(s) [UrlError("HTTPConnectionPool(host='169.254.169.254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff740060d40>: Failed to establish a new connection: [Errno 101] Network is unreachable'))"), UrlError("HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff740061760>: Failed to establish a new connection: [Errno 101] Network is unreachable'))")] during request to http://[fd00:ec2::254]/latest/api/token, raising last exception
Jan 06 13:36:28 localhost cloud-init[676]: 2025-01-06 18:36:28,641 - url_helper.py[WARNING]: Calling 'http://[fd00:ec2::254]/latest/api/token' failed [14/120s]: request error [HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff740061760>: Failed to establish a new connection: [Errno 101] Network is unreachable'))]
Jan 06 13:36:30 localhost cloud-init[676]: 2025-01-06 18:36:30,793 - url_helper.py[WARNING]: Exception(s) [UrlError("HTTPConnectionPool(host='169.254.169.254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff7400614c0>: Failed to establish a new connection: [Errno 101] Network is unreachable'))"), UrlError("HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff740061010>: Failed to establish a new connection: [Errno 101] Network is unreachable'))")] during request to http://[fd00:ec2::254]/latest/api/token, raising last exception
Jan 06 13:36:30 localhost cloud-init[676]: 2025-01-06 18:36:30,793 - url_helper.py[WARNING]: Calling 'http://[fd00:ec2::254]/latest/api/token' failed [16/120s]: request error [HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff740061010>: Failed to establish a new connection: [Errno 101] Network is unreachable'))]
Jan 06 13:36:33 localhost cloud-init[676]: 2025-01-06 18:36:33,946 - url_helper.py[WARNING]: Exception(s) [UrlError("HTTPConnectionPool(host='169.254.169.254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff740062720>: Failed to establish a new connection: [Errno 101] Network is unreachable'))"), UrlError("HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff740063170>: Failed to establish a new connection: [Errno 101] Network is unreachable'))")] during request to http://[fd00:ec2::254]/latest/api/token, raising last exception
Jan 06 13:36:33 localhost cloud-init[676]: 2025-01-06 18:36:33,946 - url_helper.py[WARNING]: Calling 'http://[fd00:ec2::254]/latest/api/token' failed [19/120s]: request error [HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff740063170>: Failed to establish a new connection: [Errno 101] Network is unreachable'))]
Jan 06 13:36:37 localhost cloud-init[676]: 2025-01-06 18:36:37,098 - url_helper.py[WARNING]: Exception(s) [UrlError("HTTPConnectionPool(host='169.254.169.254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff73ea78230>: Failed to establish a new connection: [Errno 101] Network is unreachable'))"), UrlError("HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff73ea78c50>: Failed to establish a new connection: [Errno 101] Network is unreachable'))")] during request to http://[fd00:ec2::254]/latest/api/token, raising last exception
Jan 06 13:36:37 localhost cloud-init[676]: 2025-01-06 18:36:37,098 - url_helper.py[WARNING]: Calling 'http://[fd00:ec2::254]/latest/api/token' failed [22/120s]: request error [HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff73ea78c50>: Failed to establish a new connection: [Errno 101] Network is unreachable'))]
Jan 06 13:36:40 localhost cloud-init[676]: 2025-01-06 18:36:40,251 - url_helper.py[WARNING]: Exception(s) [UrlError("HTTPConnectionPool(host='169.254.169.254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff740063d70>: Failed to establish a new connection: [Errno 101] Network is unreachable'))"), UrlError("HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff73ea78920>: Failed to establish a new connection: [Errno 101] Network is unreachable'))")] during request to http://[fd00:ec2::254]/latest/api/token, raising last exception
Jan 06 13:36:40 localhost cloud-init[676]: 2025-01-06 18:36:40,251 - url_helper.py[WARNING]: Calling 'http://[fd00:ec2::254]/latest/api/token' failed [26/120s]: request error [HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff73ea78920>: Failed to establish a new connection: [Errno 101] Network is unreachable'))]
Jan 06 13:36:43 localhost cloud-init[676]: 2025-01-06 18:36:43,403 - url_helper.py[WARNING]: Exception(s) [UrlError("HTTPConnectionPool(host='169.254.169.254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff73ea79ca0>: Failed to establish a new connection: [Errno 101] Network is unreachable'))"), UrlError("HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff73ea7a6c0>: Failed to establish a new connection: [Errno 101] Network is unreachable'))")] during request to http://[fd00:ec2::254]/latest/api/token, raising last exception
Jan 06 13:36:43 localhost cloud-init[676]: 2025-01-06 18:36:43,403 - url_helper.py[WARNING]: Calling 'http://[fd00:ec2::254]/latest/api/token' failed [29/120s]: request error [HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff73ea7a6c0>: Failed to establish a new connection: [Errno 101] Network is unreachable'))]
Jan 06 13:36:46 localhost cloud-init[676]: 2025-01-06 18:36:46,555 - url_helper.py[WARNING]: Exception(s) [UrlError("HTTPConnectionPool(host='169.254.169.254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff73ea7b710>: Failed to establish a new connection: [Errno 101] Network is unreachable'))"), UrlError("HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff73eab0170>: Failed to establish a new connection: [Errno 101] Network is unreachable'))")] during request to http://[fd00:ec2::254]/latest/api/token, raising last exception
Jan 06 13:36:46 localhost cloud-init[676]: 2025-01-06 18:36:46,556 - url_helper.py[WARNING]: Calling 'http://[fd00:ec2::254]/latest/api/token' failed [32/120s]: request error [HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff73eab0170>: Failed to establish a new connection: [Errno 101] Network is unreachable'))]
Jan 06 13:36:50 localhost cloud-init[676]: 2025-01-06 18:36:50,708 - url_helper.py[WARNING]: Exception(s) [UrlError("HTTPConnectionPool(host='169.254.169.254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff73ea7be00>: Failed to establish a new connection: [Errno 101] Network is unreachable'))"), UrlError("HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff73ea7b6b0>: Failed to establish a new connection: [Errno 101] Network is unreachable'))")] during request to http://[fd00:ec2::254]/latest/api/token, raising last exception
Jan 06 13:36:50 localhost cloud-init[676]: 2025-01-06 18:36:50,708 - url_helper.py[WARNING]: Calling 'http://[fd00:ec2::254]/latest/api/token' failed [36/120s]: request error [HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff73ea7b6b0>: Failed to establish a new connection: [Errno 101] Network is unreachable'))]
Jan 06 13:36:54 localhost cloud-init[676]: 2025-01-06 18:36:54,860 - url_helper.py[WARNING]: Exception(s) [UrlError("HTTPConnectionPool(host='169.254.169.254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff73eab11c0>: Failed to establish a new connection: [Errno 101] Network is unreachable'))"), UrlError("HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff73eab1bb0>: Failed to establish a new connection: [Errno 101] Network is unreachable'))")] during request to http://[fd00:ec2::254]/latest/api/token, raising last exception
Jan 06 13:36:54 localhost cloud-init[676]: 2025-01-06 18:36:54,860 - url_helper.py[WARNING]: Calling 'http://[fd00:ec2::254]/latest/api/token' failed [40/120s]: request error [HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff73eab1bb0>: Failed to establish a new connection: [Errno 101] Network is unreachable'))]
Jan 06 13:36:59 localhost cloud-init[676]: 2025-01-06 18:36:59,014 - url_helper.py[WARNING]: Exception(s) [UrlError("HTTPConnectionPool(host='169.254.169.254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff73ea7b170>: Failed to establish a new connection: [Errno 101] Network is unreachable'))"), UrlError("HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff73ea79940>: Failed to establish a new connection: [Errno 101] Network is unreachable'))")] during request to http://[fd00:ec2::254]/latest/api/token, raising last exception
Jan 06 13:36:59 localhost cloud-init[676]: 2025-01-06 18:36:59,014 - url_helper.py[WARNING]: Calling 'http://[fd00:ec2::254]/latest/api/token' failed [44/120s]: request error [HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff73ea79940>: Failed to establish a new connection: [Errno 101] Network is unreachable'))]
Jan 06 13:37:03 localhost cloud-init[676]: 2025-01-06 18:37:03,167 - url_helper.py[WARNING]: Exception(s) [UrlError("HTTPConnectionPool(host='169.254.169.254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff73ea79790>: Failed to establish a new connection: [Errno 101] Network is unreachable'))"), UrlError("HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff73ea7a2a0>: Failed to establish a new connection: [Errno 101] Network is unreachable'))")] during request to http://[fd00:ec2::254]/latest/api/token, raising last exception
Jan 06 13:37:03 localhost cloud-init[676]: 2025-01-06 18:37:03,167 - url_helper.py[WARNING]: Calling 'http://[fd00:ec2::254]/latest/api/token' failed [49/120s]: request error [HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff73ea7a2a0>: Failed to establish a new connection: [Errno 101] Network is unreachable'))]
Jan 06 13:37:07 localhost cloud-init[676]: 2025-01-06 18:37:07,319 - url_helper.py[WARNING]: Exception(s) [UrlError("HTTPConnectionPool(host='169.254.169.254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff740063560>: Failed to establish a new connection: [Errno 101] Network is unreachable'))"), UrlError("HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff740063a40>: Failed to establish a new connection: [Errno 101] Network is unreachable'))")] during request to http://[fd00:ec2::254]/latest/api/token, raising last exception
Jan 06 13:37:07 localhost cloud-init[676]: 2025-01-06 18:37:07,319 - url_helper.py[WARNING]: Calling 'http://[fd00:ec2::254]/latest/api/token' failed [53/120s]: request error [HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff740063a40>: Failed to establish a new connection: [Errno 101] Network is unreachable'))]
Jan 06 13:37:12 localhost cloud-init[676]: 2025-01-06 18:37:12,471 - url_helper.py[WARNING]: Exception(s) [UrlError("HTTPConnectionPool(host='169.254.169.254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff740061fd0>: Failed to establish a new connection: [Errno 101] Network is unreachable'))"), UrlError("HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff73ea79370>: Failed to establish a new connection: [Errno 101] Network is unreachable'))")] during request to http://[fd00:ec2::254]/latest/api/token, raising last exception
Jan 06 13:37:12 localhost cloud-init[676]: 2025-01-06 18:37:12,471 - url_helper.py[WARNING]: Calling 'http://[fd00:ec2::254]/latest/api/token' failed [58/120s]: request error [HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff73ea79370>: Failed to establish a new connection: [Errno 101] Network is unreachable'))]
Jan 06 13:37:17 localhost cloud-init[676]: 2025-01-06 18:37:17,623 - url_helper.py[WARNING]: Exception(s) [UrlError("HTTPConnectionPool(host='169.254.169.254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff740063710>: Failed to establish a new connection: [Errno 101] Network is unreachable'))"), UrlError("HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff740061670>: Failed to establish a new connection: [Errno 101] Network is unreachable'))")] during request to http://[fd00:ec2::254]/latest/api/token, raising last exception
Jan 06 13:37:17 localhost cloud-init[676]: 2025-01-06 18:37:17,624 - url_helper.py[WARNING]: Calling 'http://[fd00:ec2::254]/latest/api/token' failed [63/120s]: request error [HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff740061670>: Failed to establish a new connection: [Errno 101] Network is unreachable'))]
Jan 06 13:37:22 localhost cloud-init[676]: 2025-01-06 18:37:22,776 - url_helper.py[WARNING]: Exception(s) [UrlError("HTTPConnectionPool(host='169.254.169.254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff740061c10>: Failed to establish a new connection: [Errno 101] Network is unreachable'))"), UrlError("HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff740060080>: Failed to establish a new connection: [Errno 101] Network is unreachable'))")] during request to http://[fd00:ec2::254]/latest/api/token, raising last exception
Jan 06 13:37:22 localhost cloud-init[676]: 2025-01-06 18:37:22,776 - url_helper.py[WARNING]: Calling 'http://[fd00:ec2::254]/latest/api/token' failed [68/120s]: request error [HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff740060080>: Failed to establish a new connection: [Errno 101] Network is unreachable'))]
Jan 06 13:37:27 localhost cloud-init[676]: 2025-01-06 18:37:27,928 - url_helper.py[WARNING]: Exception(s) [UrlError("HTTPConnectionPool(host='169.254.169.254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff740063a40>: Failed to establish a new connection: [Errno 101] Network is unreachable'))"), UrlError("HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff7400331d0>: Failed to establish a new connection: [Errno 101] Network is unreachable'))")] during request to http://[fd00:ec2::254]/latest/api/token, raising last exception
Jan 06 13:37:27 localhost cloud-init[676]: 2025-01-06 18:37:27,928 - url_helper.py[WARNING]: Calling 'http://[fd00:ec2::254]/latest/api/token' failed [73/120s]: request error [HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff7400331d0>: Failed to establish a new connection: [Errno 101] Network is unreachable'))]
Jan 06 13:37:33 localhost cloud-init[676]: 2025-01-06 18:37:33,080 - url_helper.py[WARNING]: Exception(s) [UrlError("HTTPConnectionPool(host='169.254.169.254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff7400605f0>: Failed to establish a new connection: [Errno 101] Network is unreachable'))"), UrlError("HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff740063b00>: Failed to establish a new connection: [Errno 101] Network is unreachable'))")] during request to http://[fd00:ec2::254]/latest/api/token, raising last exception
Jan 06 13:37:33 localhost cloud-init[676]: 2025-01-06 18:37:33,080 - url_helper.py[WARNING]: Calling 'http://[fd00:ec2::254]/latest/api/token' failed [78/120s]: request error [HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff740063b00>: Failed to establish a new connection: [Errno 101] Network is unreachable'))]
Jan 06 13:37:39 localhost cloud-init[676]: 2025-01-06 18:37:39,232 - url_helper.py[WARNING]: Exception(s) [UrlError("HTTPConnectionPool(host='169.254.169.254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff740032ed0>: Failed to establish a new connection: [Errno 101] Network is unreachable'))"), UrlError("HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff740033a10>: Failed to establish a new connection: [Errno 101] Network is unreachable'))")] during request to http://[fd00:ec2::254]/latest/api/token, raising last exception
Jan 06 13:37:39 localhost cloud-init[676]: 2025-01-06 18:37:39,232 - url_helper.py[WARNING]: Calling 'http://[fd00:ec2::254]/latest/api/token' failed [85/120s]: request error [HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff740033a10>: Failed to establish a new connection: [Errno 101] Network is unreachable'))]
Jan 06 13:37:45 localhost cloud-init[676]: 2025-01-06 18:37:45,385 - url_helper.py[WARNING]: Exception(s) [UrlError("HTTPConnectionPool(host='169.254.169.254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff7400311c0>: Failed to establish a new connection: [Errno 101] Network is unreachable'))"), UrlError("HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff740033050>: Failed to establish a new connection: [Errno 101] Network is unreachable'))")] during request to http://[fd00:ec2::254]/latest/api/token, raising last exception
Jan 06 13:37:45 localhost cloud-init[676]: 2025-01-06 18:37:45,385 - url_helper.py[WARNING]: Calling 'http://[fd00:ec2::254]/latest/api/token' failed [91/120s]: request error [HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff740033050>: Failed to establish a new connection: [Errno 101] Network is unreachable'))]
Jan 06 13:37:51 localhost cloud-init[676]: 2025-01-06 18:37:51,537 - url_helper.py[WARNING]: Exception(s) [UrlError("HTTPConnectionPool(host='169.254.169.254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff740031fa0>: Failed to establish a new connection: [Errno 101] Network is unreachable'))"), UrlError("HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff740032120>: Failed to establish a new connection: [Errno 101] Network is unreachable'))")] during request to http://[fd00:ec2::254]/latest/api/token, raising last exception
Jan 06 13:37:51 localhost cloud-init[676]: 2025-01-06 18:37:51,537 - url_helper.py[WARNING]: Calling 'http://[fd00:ec2::254]/latest/api/token' failed [97/120s]: request error [HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff740032120>: Failed to establish a new connection: [Errno 101] Network is unreachable'))]
Jan 06 13:37:57 localhost cloud-init[676]: 2025-01-06 18:37:57,690 - url_helper.py[WARNING]: Exception(s) [UrlError("HTTPConnectionPool(host='169.254.169.254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff740030aa0>: Failed to establish a new connection: [Errno 101] Network is unreachable'))"), UrlError("HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff7400318e0>: Failed to establish a new connection: [Errno 101] Network is unreachable'))")] during request to http://[fd00:ec2::254]/latest/api/token, raising last exception
Jan 06 13:37:57 localhost cloud-init[676]: 2025-01-06 18:37:57,690 - url_helper.py[WARNING]: Calling 'http://[fd00:ec2::254]/latest/api/token' failed [103/120s]: request error [HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff7400318e0>: Failed to establish a new connection: [Errno 101] Network is unreachable'))]
Jan 06 13:38:03 localhost cloud-init[676]: 2025-01-06 18:38:03,842 - url_helper.py[WARNING]: Exception(s) [UrlError("HTTPConnectionPool(host='169.254.169.254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff74000bf80>: Failed to establish a new connection: [Errno 101] Network is unreachable'))"), UrlError("HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff74000a300>: Failed to establish a new connection: [Errno 101] Network is unreachable'))")] during request to http://[fd00:ec2::254]/latest/api/token, raising last exception
Jan 06 13:38:03 localhost cloud-init[676]: 2025-01-06 18:38:03,843 - url_helper.py[WARNING]: Calling 'http://[fd00:ec2::254]/latest/api/token' failed [109/120s]: request error [HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff74000a300>: Failed to establish a new connection: [Errno 101] Network is unreachable'))]
Jan 06 13:38:10 localhost cloud-init[676]: 2025-01-06 18:38:10,995 - url_helper.py[WARNING]: Exception(s) [UrlError("HTTPConnectionPool(host='169.254.169.254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff74000a990>: Failed to establish a new connection: [Errno 101] Network is unreachable'))"), UrlError("HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff74000b410>: Failed to establish a new connection: [Errno 101] Network is unreachable'))")] during request to http://[fd00:ec2::254]/latest/api/token, raising last exception
Jan 06 13:38:11 localhost cloud-init[676]: 2025-01-06 18:38:10,995 - url_helper.py[WARNING]: Calling 'http://[fd00:ec2::254]/latest/api/token' failed [116/120s]: request error [HTTPConnectionPool(host='fd00:ec2::254', port=80): Max retries exceeded with url: /latest/api/token (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ff74000b410>: Failed to establish a new connection: [Errno 101] Network is unreachable'))]
Jan 06 13:38:11 localhost cloud-init[676]: 2025-01-06 18:38:10,995 - url_helper.py[ERROR]: Timed out, no response from urls: ['http://169.254.169.254/latest/api/token', 'http://[fd00:ec2::254]/latest/api/token']
Jan 06 13:38:11 localhost cloud-init[676]: 2025-01-06 18:38:10,995 - DataSourceEc2.py[WARNING]: IMDS's HTTP endpoint is probably disabled
Jan 06 13:38:11 localhost systemd[1]: Finished cloud-init-local.service - Initial cloud-init job (pre-networking).
░░ Subject: A start job for unit cloud-init-local.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit cloud-init-local.service has finished successfully.
░░ 
░░ The job identifier is 243.
Jan 06 13:38:11 localhost systemd[1]: Reached target network-pre.target - Preparation for Network.
░░ Subject: A start job for unit network-pre.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit network-pre.target has finished successfully.
░░ 
░░ The job identifier is 160.
Jan 06 13:38:11 localhost systemd[1]: Starting NetworkManager.service - Network Manager...
░░ Subject: A start job for unit NetworkManager.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit NetworkManager.service has begun execution.
░░ 
░░ The job identifier is 224.
Jan 06 13:38:11 localhost NetworkManager[778]: <info>  [1736188691.1626] NetworkManager (version 1.51.4-1.el10) is starting... (boot:3d25e50e-61e0-4490-b0a1-ed5bc374e8b1)
Jan 06 13:38:11 localhost NetworkManager[778]: <info>  [1736188691.1628] Read config: /etc/NetworkManager/NetworkManager.conf, /etc/NetworkManager/conf.d/30-cloud-init-ip6-addr-gen-mode.conf
Jan 06 13:38:11 localhost NetworkManager[778]: <info>  [1736188691.1749] manager[0x5613926b9fb0]: monitoring kernel firmware directory '/lib/firmware'.
Jan 06 13:38:11 localhost systemd[1]: Starting systemd-hostnamed.service - Hostname Service...
░░ Subject: A start job for unit systemd-hostnamed.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-hostnamed.service has begun execution.
░░ 
░░ The job identifier is 339.
Jan 06 13:38:11 localhost systemd[1]: Started systemd-hostnamed.service - Hostname Service.
░░ Subject: A start job for unit systemd-hostnamed.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-hostnamed.service has finished successfully.
░░ 
░░ The job identifier is 339.
Jan 06 13:38:11 localhost NetworkManager[778]: <info>  [1736188691.2354] hostname: hostname: using hostnamed
Jan 06 13:38:11 localhost NetworkManager[778]: <info>  [1736188691.2358] dns-mgr: init: dns=default,systemd-resolved rc-manager=symlink (auto)
Jan 06 13:38:11 localhost NetworkManager[778]: <info>  [1736188691.2360] policy: set-hostname: set hostname to 'localhost.localdomain' (no hostname found)
Jan 06 13:38:11 localhost NetworkManager[778]: <info>  [1736188691.2363] manager[0x5613926b9fb0]: rfkill: Wi-Fi hardware radio set enabled
Jan 06 13:38:11 localhost NetworkManager[778]: <info>  [1736188691.2363] manager[0x5613926b9fb0]: rfkill: WWAN hardware radio set enabled
Jan 06 13:38:11 localhost.localdomain systemd-hostnamed[783]: Hostname set to <localhost.localdomain> (transient)
Jan 06 13:38:11 localhost.localdomain NetworkManager[778]: <info>  [1736188691.2388] manager: rfkill: Wi-Fi enabled by radio killswitch; enabled by state file
Jan 06 13:38:11 localhost.localdomain NetworkManager[778]: <info>  [1736188691.2389] manager: rfkill: WWAN enabled by radio killswitch; enabled by state file
Jan 06 13:38:11 localhost.localdomain NetworkManager[778]: <info>  [1736188691.2389] manager: Networking is enabled by state file
Jan 06 13:38:11 localhost.localdomain NetworkManager[778]: <info>  [1736188691.2405] settings: Loaded settings plugin: keyfile (internal)
Jan 06 13:38:11 localhost.localdomain systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service...
░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit NetworkManager-dispatcher.service has begun execution.
░░ 
░░ The job identifier is 417.
Jan 06 13:38:11 localhost.localdomain NetworkManager[778]: <info>  [1736188691.2472] dhcp: init: Using DHCP client 'internal'
Jan 06 13:38:11 localhost.localdomain NetworkManager[778]: <info>  [1736188691.2475] manager: (lo): new Loopback device (/org/freedesktop/NetworkManager/Devices/1)
Jan 06 13:38:11 localhost.localdomain NetworkManager[778]: <info>  [1736188691.2508] device (lo): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Jan 06 13:38:11 localhost.localdomain NetworkManager[778]: <info>  [1736188691.2526] device (lo): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external')
Jan 06 13:38:11 localhost.localdomain NetworkManager[778]: <info>  [1736188691.2533] device (lo): Activation: starting connection 'lo' (1f5df0eb-8fa7-4519-8a00-e8151bf25963)
Jan 06 13:38:11 localhost.localdomain NetworkManager[778]: <info>  [1736188691.2538] device (eth0): carrier: link connected
Jan 06 13:38:11 localhost.localdomain NetworkManager[778]: <info>  [1736188691.2541] manager: (eth0): new Ethernet device (/org/freedesktop/NetworkManager/Devices/2)
Jan 06 13:38:11 localhost.localdomain NetworkManager[778]: <info>  [1736188691.2547] device (eth0): state change: unmanaged -> unavailable (reason 'managed', managed-type: 'external')
Jan 06 13:38:11 localhost.localdomain systemd[1]: Started NetworkManager.service - Network Manager.
░░ Subject: A start job for unit NetworkManager.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit NetworkManager.service has finished successfully.
░░ 
░░ The job identifier is 224.
Jan 06 13:38:11 localhost.localdomain NetworkManager[778]: <info>  [1736188691.2574] bus-manager: acquired D-Bus service "org.freedesktop.NetworkManager"
Jan 06 13:38:11 localhost.localdomain NetworkManager[778]: <info>  [1736188691.2579] device (lo): state change: disconnected -> prepare (reason 'none', managed-type: 'external')
Jan 06 13:38:11 localhost.localdomain systemd[1]: Reached target network.target - Network.
░░ Subject: A start job for unit network.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit network.target has finished successfully.
░░ 
░░ The job identifier is 226.
Jan 06 13:38:11 localhost.localdomain systemd[1]: Starting NetworkManager-wait-online.service - Network Manager Wait Online...
░░ Subject: A start job for unit NetworkManager-wait-online.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit NetworkManager-wait-online.service has begun execution.
░░ 
░░ The job identifier is 223.
Jan 06 13:38:11 localhost.localdomain NetworkManager[778]: <info>  [1736188691.2581] device (lo): state change: prepare -> config (reason 'none', managed-type: 'external')
Jan 06 13:38:11 localhost.localdomain NetworkManager[778]: <info>  [1736188691.2636] device (lo): state change: config -> ip-config (reason 'none', managed-type: 'external')
Jan 06 13:38:11 localhost.localdomain NetworkManager[778]: <info>  [1736188691.2641] device (lo): state change: ip-config -> ip-check (reason 'none', managed-type: 'external')
Jan 06 13:38:11 localhost.localdomain NetworkManager[778]: <info>  [1736188691.2651] device (eth0): state change: unavailable -> disconnected (reason 'none', managed-type: 'full')
Jan 06 13:38:11 localhost.localdomain systemd[1]: Starting gssproxy.service - GSSAPI Proxy Daemon...
░░ Subject: A start job for unit gssproxy.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit gssproxy.service has begun execution.
░░ 
░░ The job identifier is 271.
Jan 06 13:38:11 localhost.localdomain NetworkManager[778]: <info>  [1736188691.2685] policy: auto-activating connection 'cloud-init eth0' (1dd9a779-d327-56e1-8454-c65e2556c12c)
Jan 06 13:38:11 localhost.localdomain NetworkManager[778]: <info>  [1736188691.2689] device (eth0): Activation: starting connection 'cloud-init eth0' (1dd9a779-d327-56e1-8454-c65e2556c12c)
Jan 06 13:38:11 localhost.localdomain NetworkManager[778]: <info>  [1736188691.2690] device (eth0): state change: disconnected -> prepare (reason 'none', managed-type: 'full')
Jan 06 13:38:11 localhost.localdomain NetworkManager[778]: <info>  [1736188691.2692] manager: NetworkManager state is now CONNECTING
Jan 06 13:38:11 localhost.localdomain NetworkManager[778]: <info>  [1736188691.2694] device (eth0): state change: prepare -> config (reason 'none', managed-type: 'full')
Jan 06 13:38:11 localhost.localdomain NetworkManager[778]: <info>  [1736188691.2708] device (eth0): state change: config -> ip-config (reason 'none', managed-type: 'full')
Jan 06 13:38:11 localhost.localdomain NetworkManager[778]: <info>  [1736188691.2721] dhcp4 (eth0): activation: beginning transaction (timeout in 45 seconds)
Jan 06 13:38:11 localhost.localdomain NetworkManager[778]: <info>  [1736188691.2736] dhcp4 (eth0): state changed new lease, address=10.31.45.81, acd pending
Jan 06 13:38:11 localhost.localdomain systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service.
░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit NetworkManager-dispatcher.service has finished successfully.
░░ 
░░ The job identifier is 417.
Jan 06 13:38:11 localhost.localdomain NetworkManager[778]: <info>  [1736188691.2831] device (lo): state change: ip-check -> secondaries (reason 'none', managed-type: 'external')
Jan 06 13:38:11 localhost.localdomain NetworkManager[778]: <info>  [1736188691.2835] device (lo): state change: secondaries -> activated (reason 'none', managed-type: 'external')
Jan 06 13:38:11 localhost.localdomain NetworkManager[778]: <info>  [1736188691.2839] device (lo): Activation: successful, device activated.
Jan 06 13:38:11 localhost.localdomain systemd[1]: Started gssproxy.service - GSSAPI Proxy Daemon.
░░ Subject: A start job for unit gssproxy.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit gssproxy.service has finished successfully.
░░ 
░░ The job identifier is 271.
Jan 06 13:38:11 localhost.localdomain systemd[1]: rpc-gssd.service - RPC security service for NFS client and server was skipped because of an unmet condition check (ConditionPathExists=/etc/krb5.keytab).
░░ Subject: A start job for unit rpc-gssd.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit rpc-gssd.service has finished successfully.
░░ 
░░ The job identifier is 267.
Jan 06 13:38:11 localhost.localdomain systemd[1]: Reached target nfs-client.target - NFS client services.
░░ Subject: A start job for unit nfs-client.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit nfs-client.target has finished successfully.
░░ 
░░ The job identifier is 264.
Jan 06 13:38:11 localhost.localdomain systemd[1]: Reached target remote-fs-pre.target - Preparation for Remote File Systems.
░░ Subject: A start job for unit remote-fs-pre.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit remote-fs-pre.target has finished successfully.
░░ 
░░ The job identifier is 272.
Jan 06 13:38:11 localhost.localdomain systemd[1]: Reached target remote-cryptsetup.target - Remote Encrypted Volumes.
░░ Subject: A start job for unit remote-cryptsetup.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit remote-cryptsetup.target has finished successfully.
░░ 
░░ The job identifier is 261.
Jan 06 13:38:11 localhost.localdomain systemd[1]: Reached target remote-fs.target - Remote File Systems.
░░ Subject: A start job for unit remote-fs.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit remote-fs.target has finished successfully.
░░ 
░░ The job identifier is 263.
Jan 06 13:38:11 localhost.localdomain systemd[1]: systemd-pcrphase.service - TPM PCR Barrier (User) was skipped because of an unmet condition check (ConditionSecurity=measured-uki).
░░ Subject: A start job for unit systemd-pcrphase.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-pcrphase.service has finished successfully.
░░ 
░░ The job identifier is 191.
Jan 06 13:38:11 localhost.localdomain NetworkManager[778]: <info>  [1736188691.4287] dhcp4 (eth0): state changed new lease, address=10.31.45.81
Jan 06 13:38:11 localhost.localdomain NetworkManager[778]: <info>  [1736188691.4298] policy: set 'cloud-init eth0' (eth0) as default for IPv4 routing and DNS
Jan 06 13:38:11 localhost.localdomain NetworkManager[778]: <info>  [1736188691.4301] policy: set-hostname: set hostname to 'ip-10-31-45-81' (from DHCPv4)
Jan 06 13:38:11 ip-10-31-45-81 systemd-hostnamed[783]: Hostname set to <ip-10-31-45-81> (transient)
Jan 06 13:38:11 ip-10-31-45-81 NetworkManager[778]: <info>  [1736188691.4334] device (eth0): state change: ip-config -> ip-check (reason 'none', managed-type: 'full')
Jan 06 13:38:11 ip-10-31-45-81 NetworkManager[778]: <info>  [1736188691.4365] device (eth0): state change: ip-check -> secondaries (reason 'none', managed-type: 'full')
Jan 06 13:38:11 ip-10-31-45-81 NetworkManager[778]: <info>  [1736188691.4372] device (eth0): state change: secondaries -> activated (reason 'none', managed-type: 'full')
Jan 06 13:38:11 ip-10-31-45-81 NetworkManager[778]: <info>  [1736188691.4377] manager: NetworkManager state is now CONNECTED_SITE
Jan 06 13:38:11 ip-10-31-45-81 NetworkManager[778]: <info>  [1736188691.4379] device (eth0): Activation: successful, device activated.
Jan 06 13:38:11 ip-10-31-45-81 NetworkManager[778]: <info>  [1736188691.4384] manager: NetworkManager state is now CONNECTED_GLOBAL
Jan 06 13:38:11 ip-10-31-45-81 NetworkManager[778]: <info>  [1736188691.4387] manager: startup complete
Jan 06 13:38:11 ip-10-31-45-81 systemd[1]: Finished NetworkManager-wait-online.service - Network Manager Wait Online.
░░ Subject: A start job for unit NetworkManager-wait-online.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit NetworkManager-wait-online.service has finished successfully.
░░ 
░░ The job identifier is 223.
Jan 06 13:38:11 ip-10-31-45-81 systemd[1]: Starting cloud-init.service - Initial cloud-init job (metadata service crawler)...
░░ Subject: A start job for unit cloud-init.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit cloud-init.service has begun execution.
░░ 
░░ The job identifier is 242.
Jan 06 13:38:11 ip-10-31-45-81 chronyd[669]: Added source 10.11.160.238
Jan 06 13:38:11 ip-10-31-45-81 chronyd[669]: Added source 10.18.100.10
Jan 06 13:38:11 ip-10-31-45-81 chronyd[669]: Added source 10.2.32.37
Jan 06 13:38:11 ip-10-31-45-81 chronyd[669]: Added source 10.2.32.38
Jan 06 13:38:11 ip-10-31-45-81 cloud-init[871]: Cloud-init v. 24.1.4-21.el10 running 'init' at Mon, 06 Jan 2025 18:38:11 +0000. Up 132.93 seconds.
Jan 06 13:38:11 ip-10-31-45-81 cloud-init[871]: ci-info: ++++++++++++++++++++++++++++++++++++++Net device info++++++++++++++++++++++++++++++++++++++
Jan 06 13:38:11 ip-10-31-45-81 cloud-init[871]: ci-info: +--------+------+----------------------------+---------------+--------+-------------------+
Jan 06 13:38:11 ip-10-31-45-81 cloud-init[871]: ci-info: | Device |  Up  |          Address           |      Mask     | Scope  |     Hw-Address    |
Jan 06 13:38:11 ip-10-31-45-81 cloud-init[871]: ci-info: +--------+------+----------------------------+---------------+--------+-------------------+
Jan 06 13:38:11 ip-10-31-45-81 cloud-init[871]: ci-info: |  eth0  | True |        10.31.45.81         | 255.255.252.0 | global | 02:a7:7f:6e:7d:d9 |
Jan 06 13:38:11 ip-10-31-45-81 cloud-init[871]: ci-info: |  eth0  | True | fe80::a7:7fff:fe6e:7dd9/64 |       .       |  link  | 02:a7:7f:6e:7d:d9 |
Jan 06 13:38:11 ip-10-31-45-81 cloud-init[871]: ci-info: |   lo   | True |         127.0.0.1          |   255.0.0.0   |  host  |         .         |
Jan 06 13:38:11 ip-10-31-45-81 cloud-init[871]: ci-info: |   lo   | True |          ::1/128           |       .       |  host  |         .         |
Jan 06 13:38:11 ip-10-31-45-81 cloud-init[871]: ci-info: +--------+------+----------------------------+---------------+--------+-------------------+
Jan 06 13:38:11 ip-10-31-45-81 cloud-init[871]: ci-info: ++++++++++++++++++++++++++++Route IPv4 info+++++++++++++++++++++++++++++
Jan 06 13:38:11 ip-10-31-45-81 cloud-init[871]: ci-info: +-------+-------------+------------+---------------+-----------+-------+
Jan 06 13:38:11 ip-10-31-45-81 cloud-init[871]: ci-info: | Route | Destination |  Gateway   |    Genmask    | Interface | Flags |
Jan 06 13:38:11 ip-10-31-45-81 cloud-init[871]: ci-info: +-------+-------------+------------+---------------+-----------+-------+
Jan 06 13:38:11 ip-10-31-45-81 cloud-init[871]: ci-info: |   0   |   0.0.0.0   | 10.31.44.1 |    0.0.0.0    |    eth0   |   UG  |
Jan 06 13:38:11 ip-10-31-45-81 cloud-init[871]: ci-info: |   1   |  10.31.44.0 |  0.0.0.0   | 255.255.252.0 |    eth0   |   U   |
Jan 06 13:38:11 ip-10-31-45-81 cloud-init[871]: ci-info: +-------+-------------+------------+---------------+-----------+-------+
Jan 06 13:38:11 ip-10-31-45-81 cloud-init[871]: ci-info: +++++++++++++++++++Route IPv6 info+++++++++++++++++++
Jan 06 13:38:11 ip-10-31-45-81 cloud-init[871]: ci-info: +-------+-------------+---------+-----------+-------+
Jan 06 13:38:11 ip-10-31-45-81 cloud-init[871]: ci-info: | Route | Destination | Gateway | Interface | Flags |
Jan 06 13:38:11 ip-10-31-45-81 cloud-init[871]: ci-info: +-------+-------------+---------+-----------+-------+
Jan 06 13:38:11 ip-10-31-45-81 cloud-init[871]: ci-info: |   0   |  fe80::/64  |    ::   |    eth0   |   U   |
Jan 06 13:38:11 ip-10-31-45-81 cloud-init[871]: ci-info: |   2   |  multicast  |    ::   |    eth0   |   U   |
Jan 06 13:38:11 ip-10-31-45-81 cloud-init[871]: ci-info: +-------+-------------+---------+-----------+-------+
Jan 06 13:38:12 ip-10-31-45-81 NetworkManager[778]: <info>  [1736188692.0398] audit: op="connections-load" args="/etc/NetworkManager/system-connections/cloud-init-eth0.nmconnection" pid=883 uid=0 result="success"
Jan 06 13:38:12 ip-10-31-45-81 NetworkManager[778]: <info>  [1736188692.0551] agent-manager: agent[db9fe34e1ed9d838,:1.8/nmcli-connect/0]: agent registered
Jan 06 13:38:12 ip-10-31-45-81 NetworkManager[778]: <info>  [1736188692.0558] device (eth0): state change: activated -> deactivating (reason 'new-activation', managed-type: 'full')
Jan 06 13:38:12 ip-10-31-45-81 NetworkManager[778]: <info>  [1736188692.0560] manager: NetworkManager state is now DISCONNECTING
Jan 06 13:38:12 ip-10-31-45-81 NetworkManager[778]: <info>  [1736188692.0566] device (eth0): disconnecting for new activation request.
Jan 06 13:38:12 ip-10-31-45-81 NetworkManager[778]: <info>  [1736188692.0567] audit: op="connection-activate" uuid="1dd9a779-d327-56e1-8454-c65e2556c12c" name="cloud-init eth0" pid=888 uid=0 result="success"
Jan 06 13:38:12 ip-10-31-45-81 NetworkManager[778]: <info>  [1736188692.0594] device (eth0): state change: deactivating -> disconnected (reason 'new-activation', managed-type: 'full')
Jan 06 13:38:12 ip-10-31-45-81 NetworkManager[778]: <info>  [1736188692.0647] dhcp4 (eth0): canceled DHCP transaction
Jan 06 13:38:12 ip-10-31-45-81 NetworkManager[778]: <info>  [1736188692.0647] dhcp4 (eth0): activation: beginning transaction (timeout in 45 seconds)
Jan 06 13:38:12 ip-10-31-45-81 NetworkManager[778]: <info>  [1736188692.0647] dhcp4 (eth0): state changed no lease
Jan 06 13:38:12 ip-10-31-45-81 NetworkManager[778]: <info>  [1736188692.0940] manager: NetworkManager state is now DISCONNECTED
Jan 06 13:38:12 ip-10-31-45-81 NetworkManager[778]: <info>  [1736188692.0942] policy: set-hostname: set hostname to 'localhost.localdomain' (no hostname found)
Jan 06 13:38:12 ip-10-31-45-81 NetworkManager[778]: <info>  [1736188692.0942] device (eth0): Activation: starting connection 'cloud-init eth0' (1dd9a779-d327-56e1-8454-c65e2556c12c)
Jan 06 13:38:12 localhost.localdomain NetworkManager[778]: <info>  [1736188692.0955] device (eth0): state change: disconnected -> prepare (reason 'none', managed-type: 'full')
Jan 06 13:38:12 localhost.localdomain systemd-hostnamed[783]: Hostname set to <localhost.localdomain> (transient)
Jan 06 13:38:12 localhost.localdomain NetworkManager[778]: <info>  [1736188692.0962] manager: NetworkManager state is now CONNECTING
Jan 06 13:38:12 localhost.localdomain NetworkManager[778]: <info>  [1736188692.0963] device (eth0): state change: prepare -> config (reason 'none', managed-type: 'full')
Jan 06 13:38:12 localhost.localdomain NetworkManager[778]: <info>  [1736188692.0968] device (eth0): state change: config -> ip-config (reason 'none', managed-type: 'full')
Jan 06 13:38:12 localhost.localdomain NetworkManager[778]: <info>  [1736188692.0972] dhcp4 (eth0): activation: beginning transaction (timeout in 45 seconds)
Jan 06 13:38:12 localhost.localdomain NetworkManager[778]: <info>  [1736188692.1000] dhcp4 (eth0): state changed new lease, address=10.31.45.81, acd pending
Jan 06 13:38:12 localhost.localdomain chronyd[669]: Removed source 10.11.160.238
Jan 06 13:38:12 localhost.localdomain chronyd[669]: Removed source 10.18.100.10
Jan 06 13:38:12 localhost.localdomain chronyd[669]: Removed source 10.2.32.37
Jan 06 13:38:12 localhost.localdomain chronyd[669]: Removed source 10.2.32.38
Jan 06 13:38:12 localhost.localdomain chronyd[669]: Source 144.202.66.214 offline
Jan 06 13:38:12 localhost.localdomain chronyd[669]: Source 45.63.54.13 offline
Jan 06 13:38:12 localhost.localdomain chronyd[669]: Source 192.189.65.187 offline
Jan 06 13:38:12 localhost.localdomain chronyd[669]: Source 12.167.151.1 offline
Jan 06 13:38:12 localhost.localdomain NetworkManager[778]: <info>  [1736188692.2657] dhcp4 (eth0): state changed new lease, address=10.31.45.81
Jan 06 13:38:12 localhost.localdomain NetworkManager[778]: <info>  [1736188692.2669] policy: set 'cloud-init eth0' (eth0) as default for IPv4 routing and DNS
Jan 06 13:38:12 localhost.localdomain NetworkManager[778]: <info>  [1736188692.2673] policy: set-hostname: set hostname to 'ip-10-31-45-81' (from DHCPv4)
Jan 06 13:38:12 ip-10-31-45-81 systemd-hostnamed[783]: Hostname set to <ip-10-31-45-81> (transient)
Jan 06 13:38:12 ip-10-31-45-81 NetworkManager[778]: <info>  [1736188692.2721] device (eth0): state change: ip-config -> ip-check (reason 'none', managed-type: 'full')
Jan 06 13:38:12 ip-10-31-45-81 NetworkManager[778]: <info>  [1736188692.2751] device (eth0): state change: ip-check -> secondaries (reason 'none', managed-type: 'full')
Jan 06 13:38:12 ip-10-31-45-81 NetworkManager[778]: <info>  [1736188692.2758] device (eth0): state change: secondaries -> activated (reason 'none', managed-type: 'full')
Jan 06 13:38:12 ip-10-31-45-81 NetworkManager[778]: <info>  [1736188692.2761] manager: NetworkManager state is now CONNECTED_SITE
Jan 06 13:38:12 ip-10-31-45-81 NetworkManager[778]: <info>  [1736188692.2763] device (eth0): Activation: successful, device activated.
Jan 06 13:38:12 ip-10-31-45-81 NetworkManager[778]: <info>  [1736188692.2768] manager: NetworkManager state is now CONNECTED_GLOBAL
Jan 06 13:38:12 ip-10-31-45-81 chronyd[669]: Source 144.202.66.214 online
Jan 06 13:38:12 ip-10-31-45-81 chronyd[669]: Source 45.63.54.13 online
Jan 06 13:38:12 ip-10-31-45-81 chronyd[669]: Source 192.189.65.187 online
Jan 06 13:38:12 ip-10-31-45-81 chronyd[669]: Source 12.167.151.1 online
Jan 06 13:38:12 ip-10-31-45-81.us-east-1.aws.redhat.com systemd-hostnamed[783]: Hostname set to <ip-10-31-45-81.us-east-1.aws.redhat.com> (static)
Jan 06 13:38:12 ip-10-31-45-81.us-east-1.aws.redhat.com NetworkManager[778]: <info>  [1736188692.3103] hostname: static hostname changed from (none) to "ip-10-31-45-81.us-east-1.aws.redhat.com"
Jan 06 13:38:12 ip-10-31-45-81.us-east-1.aws.redhat.com chronyd[669]: Added source 10.11.160.238
Jan 06 13:38:12 ip-10-31-45-81.us-east-1.aws.redhat.com chronyd[669]: Added source 10.18.100.10
Jan 06 13:38:12 ip-10-31-45-81.us-east-1.aws.redhat.com chronyd[669]: Added source 10.2.32.37
Jan 06 13:38:12 ip-10-31-45-81.us-east-1.aws.redhat.com chronyd[669]: Added source 10.2.32.38
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: Generating public/private rsa key pair.
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: Your identification has been saved in /etc/ssh/ssh_host_rsa_key
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: Your public key has been saved in /etc/ssh/ssh_host_rsa_key.pub
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: The key fingerprint is:
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: SHA256:eeiS5uhl91VJjf+uUTiYEKgItggkJpwIfTbmo87IRkE root@ip-10-31-45-81.us-east-1.aws.redhat.com
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: The key's randomart image is:
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: +---[RSA 3072]----+
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: |O+.      ..      |
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: |*E+ =   .  .   o |
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: |oo B o .  .   o .|
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: |... + .  o . + + |
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: |  .. .  S . o = o|
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: | ..    o .   . o.|
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: |o+    * o   . . .|
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: |.oo  * o . .   o |
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: |.  .o .   .   ...|
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: +----[SHA256]-----+
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: Generating public/private ecdsa key pair.
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: Your identification has been saved in /etc/ssh/ssh_host_ecdsa_key
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: Your public key has been saved in /etc/ssh/ssh_host_ecdsa_key.pub
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: The key fingerprint is:
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: SHA256:h7GyvBWfmNx0yHjWGICic601xQ/fz2aGMS9rSY0Axuc root@ip-10-31-45-81.us-east-1.aws.redhat.com
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: The key's randomart image is:
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: +---[ECDSA 256]---+
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: |       +.        |
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: |    . . B..      |
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: |   . o o.B..     |
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: |  o . +  *E=+    |
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: |   o o..S O.oX   |
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: |    .. + @ o= O  |
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: |      o = +. B   |
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: |       o    +    |
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: |      .    .     |
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: +----[SHA256]-----+
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: Generating public/private ed25519 key pair.
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: Your identification has been saved in /etc/ssh/ssh_host_ed25519_key
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: Your public key has been saved in /etc/ssh/ssh_host_ed25519_key.pub
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: The key fingerprint is:
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: SHA256:qZmTFysyuQx5KVcJ9E0aypB4seJF01PvkYlHkGp1qhM root@ip-10-31-45-81.us-east-1.aws.redhat.com
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: The key's randomart image is:
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: +--[ED25519 256]--+
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: | .o=o oo+.       |
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: |. +=.= B+.o      |
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: |....+ *.+*       |
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: |...  E oo..      |
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: | .  . = S.       |
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: |   . * = o       |
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: |  + B O o        |
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: |   * + +         |
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: |    o            |
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[871]: +----[SHA256]-----+
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: Finished cloud-init.service - Initial cloud-init job (metadata service crawler).
░░ Subject: A start job for unit cloud-init.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit cloud-init.service has finished successfully.
░░ 
░░ The job identifier is 242.
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: Reached target cloud-config.target - Cloud-config availability.
░░ Subject: A start job for unit cloud-config.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit cloud-config.target has finished successfully.
░░ 
░░ The job identifier is 252.
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: Reached target network-online.target - Network is Online.
░░ Subject: A start job for unit network-online.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit network-online.target has finished successfully.
░░ 
░░ The job identifier is 222.
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: Starting cloud-config.service - Apply the settings specified in cloud-config...
░░ Subject: A start job for unit cloud-config.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit cloud-config.service has begun execution.
░░ 
░░ The job identifier is 251.
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: Starting kdump.service - Crash recovery kernel arming...
░░ Subject: A start job for unit kdump.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit kdump.service has begun execution.
░░ 
░░ The job identifier is 262.
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: Starting restraintd.service - The restraint harness....
░░ Subject: A start job for unit restraintd.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit restraintd.service has begun execution.
░░ 
░░ The job identifier is 277.
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: Starting rpc-statd-notify.service - Notify NFS peers of a restart...
░░ Subject: A start job for unit rpc-statd-notify.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit rpc-statd-notify.service has begun execution.
░░ 
░░ The job identifier is 265.
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: Starting sshd.service - OpenSSH server daemon...
░░ Subject: A start job for unit sshd.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sshd.service has begun execution.
░░ 
░░ The job identifier is 244.
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com sm-notify[1035]: Version 2.8.2 starting
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: Started restraintd.service - The restraint harness..
░░ Subject: A start job for unit restraintd.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit restraintd.service has finished successfully.
░░ 
░░ The job identifier is 277.
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: Started rpc-statd-notify.service - Notify NFS peers of a restart.
░░ Subject: A start job for unit rpc-statd-notify.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit rpc-statd-notify.service has finished successfully.
░░ 
░░ The job identifier is 265.
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com (sshd)[1036]: sshd.service: Referenced but unset environment variable evaluates to an empty string: OPTIONS
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com sshd[1036]: Server listening on 0.0.0.0 port 22.
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: Started sshd.service - OpenSSH server daemon.
░░ Subject: A start job for unit sshd.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sshd.service has finished successfully.
░░ 
░░ The job identifier is 244.
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com sshd[1036]: Server listening on :: port 22.
Jan 06 13:38:13 ip-10-31-45-81.us-east-1.aws.redhat.com restraintd[1038]: Listening on http://localhost:8081
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com kdumpctl[1044]: kdump: Detected change(s) in the following file(s):  /etc/fstab
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[1213]: Cloud-init v. 24.1.4-21.el10 running 'modules:config' at Mon, 06 Jan 2025 18:38:14 +0000. Up 135.31 seconds.
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: Stopping sshd.service - OpenSSH server daemon...
░░ Subject: A stop job for unit sshd.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A stop job for unit sshd.service has begun execution.
░░ 
░░ The job identifier is 521.
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com sshd[1036]: Received signal 15; terminating.
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: sshd.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit sshd.service has successfully entered the 'dead' state.
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: Stopped sshd.service - OpenSSH server daemon.
░░ Subject: A stop job for unit sshd.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A stop job for unit sshd.service has finished.
░░ 
░░ The job identifier is 521 and the job result is done.
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: Stopped target sshd-keygen.target.
░░ Subject: A stop job for unit sshd-keygen.target has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A stop job for unit sshd-keygen.target has finished.
░░ 
░░ The job identifier is 605 and the job result is done.
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: Stopping sshd-keygen.target...
░░ Subject: A stop job for unit sshd-keygen.target has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A stop job for unit sshd-keygen.target has begun execution.
░░ 
░░ The job identifier is 605.
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: ssh-host-keys-migration.service - Update OpenSSH host key permissions was skipped because of an unmet condition check (ConditionPathExists=!/var/lib/.ssh-host-keys-migration).
░░ Subject: A start job for unit ssh-host-keys-migration.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit ssh-host-keys-migration.service has finished successfully.
░░ 
░░ The job identifier is 599.
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: sshd-keygen@ecdsa.service - OpenSSH ecdsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target).
░░ Subject: A start job for unit sshd-keygen@ecdsa.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sshd-keygen@ecdsa.service has finished successfully.
░░ 
░░ The job identifier is 603.
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: sshd-keygen@ed25519.service - OpenSSH ed25519 Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target).
░░ Subject: A start job for unit sshd-keygen@ed25519.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sshd-keygen@ed25519.service has finished successfully.
░░ 
░░ The job identifier is 604.
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: sshd-keygen@rsa.service - OpenSSH rsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target).
░░ Subject: A start job for unit sshd-keygen@rsa.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sshd-keygen@rsa.service has finished successfully.
░░ 
░░ The job identifier is 601.
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: Reached target sshd-keygen.target.
░░ Subject: A start job for unit sshd-keygen.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sshd-keygen.target has finished successfully.
░░ 
░░ The job identifier is 605.
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: Starting sshd.service - OpenSSH server daemon...
░░ Subject: A start job for unit sshd.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sshd.service has begun execution.
░░ 
░░ The job identifier is 521.
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com (sshd)[1280]: sshd.service: Referenced but unset environment variable evaluates to an empty string: OPTIONS
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com sshd[1280]: Server listening on 0.0.0.0 port 22.
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com sshd[1280]: Server listening on :: port 22.
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: Started sshd.service - OpenSSH server daemon.
░░ Subject: A start job for unit sshd.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sshd.service has finished successfully.
░░ 
░░ The job identifier is 521.
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: Finished cloud-config.service - Apply the settings specified in cloud-config.
░░ Subject: A start job for unit cloud-config.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit cloud-config.service has finished successfully.
░░ 
░░ The job identifier is 251.
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: Starting cloud-final.service - Execute cloud user/final scripts...
░░ Subject: A start job for unit cloud-final.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit cloud-final.service has begun execution.
░░ 
░░ The job identifier is 253.
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: Starting systemd-user-sessions.service - Permit User Sessions...
░░ Subject: A start job for unit systemd-user-sessions.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-user-sessions.service has begun execution.
░░ 
░░ The job identifier is 278.
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: Finished systemd-user-sessions.service - Permit User Sessions.
░░ Subject: A start job for unit systemd-user-sessions.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-user-sessions.service has finished successfully.
░░ 
░░ The job identifier is 278.
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: Started crond.service - Command Scheduler.
░░ Subject: A start job for unit crond.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit crond.service has finished successfully.
░░ 
░░ The job identifier is 227.
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: Started getty@tty1.service - Getty on tty1.
░░ Subject: A start job for unit getty@tty1.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit getty@tty1.service has finished successfully.
░░ 
░░ The job identifier is 239.
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: Started serial-getty@ttyS0.service - Serial Getty on ttyS0.
░░ Subject: A start job for unit serial-getty@ttyS0.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit serial-getty@ttyS0.service has finished successfully.
░░ 
░░ The job identifier is 234.
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: Reached target getty.target - Login Prompts.
░░ Subject: A start job for unit getty.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit getty.target has finished successfully.
░░ 
░░ The job identifier is 233.
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com crond[1293]: (CRON) STARTUP (1.7.0)
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: Reached target multi-user.target - Multi-User System.
░░ Subject: A start job for unit multi-user.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit multi-user.target has finished successfully.
░░ 
░░ The job identifier is 123.
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com crond[1293]: (CRON) INFO (Syslog will be used instead of sendmail.)
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com crond[1293]: (CRON) INFO (RANDOM_DELAY will be scaled with factor 77% if used.)
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com crond[1293]: (CRON) INFO (running with inotify support)
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: Starting systemd-update-utmp-runlevel.service - Record Runlevel Change in UTMP...
░░ Subject: A start job for unit systemd-update-utmp-runlevel.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-update-utmp-runlevel.service has begun execution.
░░ 
░░ The job identifier is 228.
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: systemd-update-utmp-runlevel.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit systemd-update-utmp-runlevel.service has successfully entered the 'dead' state.
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: Finished systemd-update-utmp-runlevel.service - Record Runlevel Change in UTMP.
░░ Subject: A start job for unit systemd-update-utmp-runlevel.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-update-utmp-runlevel.service has finished successfully.
░░ 
░░ The job identifier is 228.
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[1307]: Cloud-init v. 24.1.4-21.el10 running 'modules:final' at Mon, 06 Jan 2025 18:38:14 +0000. Up 135.90 seconds.
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com kernel: block xvda: the capability attribute has been deprecated.
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[1362]: #############################################################
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[1368]: -----BEGIN SSH HOST KEY FINGERPRINTS-----
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[1370]: 256 SHA256:h7GyvBWfmNx0yHjWGICic601xQ/fz2aGMS9rSY0Axuc root@ip-10-31-45-81.us-east-1.aws.redhat.com (ECDSA)
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[1373]: 256 SHA256:qZmTFysyuQx5KVcJ9E0aypB4seJF01PvkYlHkGp1qhM root@ip-10-31-45-81.us-east-1.aws.redhat.com (ED25519)
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[1381]: 3072 SHA256:eeiS5uhl91VJjf+uUTiYEKgItggkJpwIfTbmo87IRkE root@ip-10-31-45-81.us-east-1.aws.redhat.com (RSA)
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[1383]: -----END SSH HOST KEY FINGERPRINTS-----
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[1385]: #############################################################
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com cloud-init[1307]: Cloud-init v. 24.1.4-21.el10 finished at Mon, 06 Jan 2025 18:38:14 +0000. Datasource DataSourceEc2.  Up 136.06 seconds
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: Finished cloud-final.service - Execute cloud user/final scripts.
░░ Subject: A start job for unit cloud-final.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit cloud-final.service has finished successfully.
░░ 
░░ The job identifier is 253.
Jan 06 13:38:14 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: Reached target cloud-init.target - Cloud-init target.
░░ Subject: A start job for unit cloud-init.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit cloud-init.target has finished successfully.
░░ 
░░ The job identifier is 241.
Jan 06 13:38:15 ip-10-31-45-81.us-east-1.aws.redhat.com kdumpctl[1044]: kdump: Rebuilding /boot/initramfs-6.12.0-36.el10.x86_64kdump.img
Jan 06 13:38:15 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1546]: dracut-103-1.el10
Jan 06 13:38:15 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1549]: Executing: /usr/bin/dracut --list-modules
Jan 06 13:38:15 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1620]: dracut-103-1.el10
Jan 06 13:38:15 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Executing: /usr/bin/dracut --add kdumpbase --quiet --hostonly --hostonly-cmdline --hostonly-i18n --hostonly-mode strict --hostonly-nics  --aggressive-strip --omit "rdma plymouth resume ifcfg earlykdump" --mount "/dev/disk/by-uuid/d34a4578-e79c-4219-88ec-bb1d36b608ae /sysroot xfs rw,relatime,seclabel,attr2,inode64,logbufs=8,logbsize=32k,noquota" --add squash-squashfs --squash-compressor zstd --no-hostonly-default-device -f /boot/initramfs-6.12.0-36.el10.x86_64kdump.img 6.12.0-36.el10.x86_64
Jan 06 13:38:15 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'systemd-bsod' will not be installed, because command '/usr/lib/systemd/systemd-bsod' could not be found!
Jan 06 13:38:15 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'systemd-networkd' will not be installed, because command '/usr/lib/systemd/systemd-networkd' could not be found!
Jan 06 13:38:15 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'systemd-networkd' will not be installed, because command '/usr/lib/systemd/systemd-networkd-wait-online' could not be found!
Jan 06 13:38:15 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'systemd-pcrphase' will not be installed, because command '/usr/lib/systemd/systemd-pcrphase' could not be found!
Jan 06 13:38:15 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'systemd-portabled' will not be installed, because command 'portablectl' could not be found!
Jan 06 13:38:15 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'systemd-portabled' will not be installed, because command '/usr/lib/systemd/systemd-portabled' could not be found!
Jan 06 13:38:15 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'systemd-resolved' will not be installed, because command '/usr/lib/systemd/systemd-resolved' could not be found!
Jan 06 13:38:15 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-timesyncd' could not be found!
Jan 06 13:38:15 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-time-wait-sync' could not be found!
Jan 06 13:38:15 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'busybox' will not be installed, because command 'busybox' could not be found!
Jan 06 13:38:15 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'dbus-daemon' will not be installed, because command 'dbus-daemon' could not be found!
Jan 06 13:38:15 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'connman' will not be installed, because command 'connmand' could not be found!
Jan 06 13:38:15 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'connman' will not be installed, because command 'connmanctl' could not be found!
Jan 06 13:38:15 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'connman' will not be installed, because command 'connmand-wait-online' could not be found!
Jan 06 13:38:15 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'ifcfg' will not be installed, because it's in the list to be omitted!
Jan 06 13:38:15 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'plymouth' will not be installed, because it's in the list to be omitted!
Jan 06 13:38:15 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: 62bluetooth: Could not find any command of '/usr/lib/bluetooth/bluetoothd /usr/libexec/bluetooth/bluetoothd'!
Jan 06 13:38:15 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'btrfs' will not be installed, because command 'btrfs' could not be found!
Jan 06 13:38:15 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'dmraid' will not be installed, because command 'dmraid' could not be found!
Jan 06 13:38:15 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'mdraid' will not be installed, because command 'mdadm' could not be found!
Jan 06 13:38:15 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'multipath' will not be installed, because command 'multipath' could not be found!
Jan 06 13:38:15 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'crypt-gpg' will not be installed, because command 'gpg' could not be found!
Jan 06 13:38:15 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'pcsc' will not be installed, because command 'pcscd' could not be found!
Jan 06 13:38:15 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'cifs' will not be installed, because command 'mount.cifs' could not be found!
Jan 06 13:38:15 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'iscsi' will not be installed, because command 'iscsi-iname' could not be found!
Jan 06 13:38:15 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'iscsi' will not be installed, because command 'iscsiadm' could not be found!
Jan 06 13:38:15 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'iscsi' will not be installed, because command 'iscsid' could not be found!
Jan 06 13:38:15 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'nvmf' will not be installed, because command 'nvme' could not be found!
Jan 06 13:38:15 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'resume' will not be installed, because it's in the list to be omitted!
Jan 06 13:38:15 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'squash-erofs' will not be installed, because command 'mkfs.erofs' could not be found!
Jan 06 13:38:15 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'squash-erofs' will not be installed, because command 'fsck.erofs' could not be found!
Jan 06 13:38:15 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'biosdevname' will not be installed, because command 'biosdevname' could not be found!
Jan 06 13:38:15 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'earlykdump' will not be installed, because it's in the list to be omitted!
Jan 06 13:38:16 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'systemd-bsod' will not be installed, because command '/usr/lib/systemd/systemd-bsod' could not be found!
Jan 06 13:38:16 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'systemd-pcrphase' will not be installed, because command '/usr/lib/systemd/systemd-pcrphase' could not be found!
Jan 06 13:38:16 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'systemd-portabled' will not be installed, because command 'portablectl' could not be found!
Jan 06 13:38:16 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'systemd-portabled' will not be installed, because command '/usr/lib/systemd/systemd-portabled' could not be found!
Jan 06 13:38:16 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'systemd-resolved' will not be installed, because command '/usr/lib/systemd/systemd-resolved' could not be found!
Jan 06 13:38:16 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-timesyncd' could not be found!
Jan 06 13:38:16 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-time-wait-sync' could not be found!
Jan 06 13:38:16 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'busybox' will not be installed, because command 'busybox' could not be found!
Jan 06 13:38:16 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'dbus-daemon' will not be installed, because command 'dbus-daemon' could not be found!
Jan 06 13:38:16 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'connman' will not be installed, because command 'connmand' could not be found!
Jan 06 13:38:16 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'connman' will not be installed, because command 'connmanctl' could not be found!
Jan 06 13:38:16 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'connman' will not be installed, because command 'connmand-wait-online' could not be found!
Jan 06 13:38:16 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: 62bluetooth: Could not find any command of '/usr/lib/bluetooth/bluetoothd /usr/libexec/bluetooth/bluetoothd'!
Jan 06 13:38:16 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'btrfs' will not be installed, because command 'btrfs' could not be found!
Jan 06 13:38:16 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'dmraid' will not be installed, because command 'dmraid' could not be found!
Jan 06 13:38:16 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'mdraid' will not be installed, because command 'mdadm' could not be found!
Jan 06 13:38:16 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'multipath' will not be installed, because command 'multipath' could not be found!
Jan 06 13:38:16 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'crypt-gpg' will not be installed, because command 'gpg' could not be found!
Jan 06 13:38:16 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'pcsc' will not be installed, because command 'pcscd' could not be found!
Jan 06 13:38:16 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'cifs' will not be installed, because command 'mount.cifs' could not be found!
Jan 06 13:38:16 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'iscsi' will not be installed, because command 'iscsi-iname' could not be found!
Jan 06 13:38:16 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'iscsi' will not be installed, because command 'iscsiadm' could not be found!
Jan 06 13:38:16 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'iscsi' will not be installed, because command 'iscsid' could not be found!
Jan 06 13:38:16 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'nvmf' will not be installed, because command 'nvme' could not be found!
Jan 06 13:38:16 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'squash-erofs' will not be installed, because command 'mkfs.erofs' could not be found!
Jan 06 13:38:16 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Module 'squash-erofs' will not be installed, because command 'fsck.erofs' could not be found!
Jan 06 13:38:16 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Including module: systemd ***
Jan 06 13:38:16 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Including module: fips ***
Jan 06 13:38:16 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Including module: fips-crypto-policies ***
Jan 06 13:38:16 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Including module: systemd-ask-password ***
Jan 06 13:38:16 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Including module: systemd-initrd ***
Jan 06 13:38:16 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Including module: systemd-journald ***
Jan 06 13:38:16 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Including module: systemd-modules-load ***
Jan 06 13:38:16 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Including module: systemd-sysctl ***
Jan 06 13:38:16 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Including module: systemd-sysusers ***
Jan 06 13:38:16 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Including module: systemd-tmpfiles ***
Jan 06 13:38:16 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Including module: systemd-udevd ***
Jan 06 13:38:16 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Including module: rngd ***
Jan 06 13:38:16 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Including module: i18n ***
Jan 06 13:38:16 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Including module: drm ***
Jan 06 13:38:16 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Including module: prefixdevname ***
Jan 06 13:38:16 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Including module: kernel-modules ***
Jan 06 13:38:17 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Including module: kernel-modules-extra ***
Jan 06 13:38:17 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]:   kernel-modules-extra: configuration source "/run/depmod.d" does not exist
Jan 06 13:38:17 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]:   kernel-modules-extra: configuration source "/lib/depmod.d" does not exist
Jan 06 13:38:17 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]:   kernel-modules-extra: parsing configuration file "/etc/depmod.d/dist.conf"
Jan 06 13:38:17 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]:   kernel-modules-extra: /etc/depmod.d/dist.conf: added "updates extra built-in weak-updates" to the list of search directories
Jan 06 13:38:17 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Including module: pcmcia ***
Jan 06 13:38:17 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Skipping udev rule: 60-pcmcia.rules
Jan 06 13:38:17 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Including module: fstab-sys ***
Jan 06 13:38:17 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Including module: hwdb ***
Jan 06 13:38:17 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Including module: rootfs-block ***
Jan 06 13:38:17 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Including module: squash-squashfs ***
Jan 06 13:38:17 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Including module: terminfo ***
Jan 06 13:38:17 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Including module: udev-rules ***
Jan 06 13:38:17 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Including module: dracut-systemd ***
Jan 06 13:38:17 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Including module: usrmount ***
Jan 06 13:38:17 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Including module: base ***
Jan 06 13:38:17 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Including module: fs-lib ***
Jan 06 13:38:17 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Including module: kdumpbase ***
Jan 06 13:38:17 ip-10-31-45-81.us-east-1.aws.redhat.com chronyd[669]: Selected source 10.2.32.37
Jan 06 13:38:18 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Including module: memstrack ***
Jan 06 13:38:18 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Including module: microcode_ctl-fw_dir_override ***
Jan 06 13:38:18 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]:   microcode_ctl module: mangling fw_dir
Jan 06 13:38:18 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]:     microcode_ctl: reset fw_dir to "/lib/firmware/updates /lib/firmware"
Jan 06 13:38:18 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]:     microcode_ctl: processing data directory  "/usr/share/microcode_ctl/ucode_with_caveats/intel"...
Jan 06 13:38:18 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]:       microcode_ctl: intel: caveats check for kernel version "6.12.0-36.el10.x86_64" passed, adding "/usr/share/microcode_ctl/ucode_with_caveats/intel" to fw_dir variable
Jan 06 13:38:18 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]:     microcode_ctl: processing data directory  "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-4f-01"...
Jan 06 13:38:18 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]:     microcode_ctl: configuration "intel-06-4f-01" is ignored
Jan 06 13:38:18 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]:     microcode_ctl: final fw_dir: "/usr/share/microcode_ctl/ucode_with_caveats/intel /lib/firmware/updates /lib/firmware"
Jan 06 13:38:18 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Including module: shutdown ***
Jan 06 13:38:18 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Including module: squash-lib ***
Jan 06 13:38:18 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Including modules done ***
Jan 06 13:38:18 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Installing kernel module dependencies ***
Jan 06 13:38:18 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Installing kernel module dependencies done ***
Jan 06 13:38:18 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Resolving executable dependencies ***
Jan 06 13:38:19 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Resolving executable dependencies done ***
Jan 06 13:38:19 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Hardlinking files ***
Jan 06 13:38:19 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Mode:                     real
Jan 06 13:38:19 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Method:                   sha256
Jan 06 13:38:19 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Files:                    548
Jan 06 13:38:19 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Linked:                   25 files
Jan 06 13:38:19 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Compared:                 0 xattrs
Jan 06 13:38:19 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Compared:                 56 files
Jan 06 13:38:19 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Saved:                    13.59 MiB
Jan 06 13:38:19 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Duration:                 0.180187 seconds
Jan 06 13:38:19 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Hardlinking files done ***
Jan 06 13:38:19 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Generating early-microcode cpio image ***
Jan 06 13:38:19 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Constructing GenuineIntel.bin ***
Jan 06 13:38:19 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Constructing GenuineIntel.bin ***
Jan 06 13:38:19 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Store current command line parameters ***
Jan 06 13:38:19 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: Stored kernel commandline:
Jan 06 13:38:19 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: No dracut internal kernel commandline stored in the initramfs
Jan 06 13:38:19 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Squashing the files inside the initramfs ***
Jan 06 13:38:22 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.
Jan 06 13:38:27 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Squashing the files inside the initramfs done ***
Jan 06 13:38:27 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Creating image file '/boot/initramfs-6.12.0-36.el10.x86_64kdump.img' ***
Jan 06 13:38:27 ip-10-31-45-81.us-east-1.aws.redhat.com dracut[1623]: *** Creating initramfs image file '/boot/initramfs-6.12.0-36.el10.x86_64kdump.img' done ***
Jan 06 13:38:27 ip-10-31-45-81.us-east-1.aws.redhat.com kernel: PKCS7: Message signed outside of X.509 validity window
Jan 06 13:38:28 ip-10-31-45-81.us-east-1.aws.redhat.com kdumpctl[1044]: kdump: kexec: loaded kdump kernel
Jan 06 13:38:28 ip-10-31-45-81.us-east-1.aws.redhat.com kdumpctl[1044]: kdump: Starting kdump: [OK]
Jan 06 13:38:28 ip-10-31-45-81.us-east-1.aws.redhat.com kdumpctl[1044]: kdump: Notice: No vmcore creation test performed!
Jan 06 13:38:28 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: Finished kdump.service - Crash recovery kernel arming.
░░ Subject: A start job for unit kdump.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit kdump.service has finished successfully.
░░ 
░░ The job identifier is 262.
Jan 06 13:38:28 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: Startup finished in 1.022s (kernel) + 4.621s (initrd) + 2min 23.726s (userspace) = 2min 29.369s.
░░ Subject: System start-up is now complete
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ All system services necessary queued for starting at boot have been
░░ started. Note that this does not mean that the machine is now idle as services
░░ might still be busy with completing start-up.
░░ 
░░ Kernel start-up required 1022147 microseconds.
░░ 
░░ Initrd start-up required 4621550 microseconds.
░░ 
░░ Userspace start-up required 143726148 microseconds.
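[Editor's illustration, not part of the captured journal: the three per-phase figures above account for the "Startup finished" total reported in the preceding log line. A minimal Python sketch to check the arithmetic, using only the numbers quoted above:]

    # Verify that kernel + initrd + userspace start-up times sum to the reported total.
    phases_us = {"kernel": 1_022_147, "initrd": 4_621_550, "userspace": 143_726_148}
    total_us = sum(phases_us.values())              # 149369845 microseconds
    minutes, seconds = divmod(total_us / 1e6, 60)
    print(f"total = {int(minutes)}min {seconds:.3f}s")  # ~2min 29.370s, matching "2min 29.369s" above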
Jan 06 13:38:42 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: systemd-hostnamed.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit systemd-hostnamed.service has successfully entered the 'dead' state.
Jan 06 13:38:58 ip-10-31-45-81.us-east-1.aws.redhat.com sshd-session[4478]: Accepted publickey for root from 10.30.33.24 port 37600 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU
Jan 06 13:38:58 ip-10-31-45-81.us-east-1.aws.redhat.com sshd-session[4478]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-4478) opened.
Jan 06 13:38:58 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: Created slice user-0.slice - User Slice of UID 0.
░░ Subject: A start job for unit user-0.slice has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit user-0.slice has finished successfully.
░░ 
░░ The job identifier is 684.
Jan 06 13:38:58 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: Starting user-runtime-dir@0.service - User Runtime Directory /run/user/0...
░░ Subject: A start job for unit user-runtime-dir@0.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit user-runtime-dir@0.service has begun execution.
░░ 
░░ The job identifier is 606.
Jan 06 13:38:58 ip-10-31-45-81.us-east-1.aws.redhat.com systemd-logind[647]: New session 1 of user root.
░░ Subject: A new session 1 has been created for user root
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ Documentation: sd-login(3)
░░ 
░░ A new session with the ID 1 has been created for the user root.
░░ 
░░ The leading process of the session is 4478.
Jan 06 13:38:58 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: Finished user-runtime-dir@0.service - User Runtime Directory /run/user/0.
░░ Subject: A start job for unit user-runtime-dir@0.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit user-runtime-dir@0.service has finished successfully.
░░ 
░░ The job identifier is 606.
Jan 06 13:38:58 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: Starting user@0.service - User Manager for UID 0...
░░ Subject: A start job for unit user@0.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit user@0.service has begun execution.
░░ 
░░ The job identifier is 686.
Jan 06 13:38:58 ip-10-31-45-81.us-east-1.aws.redhat.com systemd-logind[647]: New session 2 of user root.
░░ Subject: A new session 2 has been created for user root
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ Documentation: sd-login(3)
░░ 
░░ A new session with the ID 2 has been created for the user root.
░░ 
░░ The leading process of the session is 4483.
Jan 06 13:38:58 ip-10-31-45-81.us-east-1.aws.redhat.com (systemd)[4483]: pam_unix(systemd-user:session): session opened for user root(uid=0) by root(uid=0)
Jan 06 13:38:58 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[4483]: Queued start job for default target default.target.
Jan 06 13:38:58 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[4483]: Created slice app.slice - User Application Slice.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit UNIT has finished successfully.
░░ 
░░ The job identifier is 9.
Jan 06 13:38:58 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[4483]: grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes was skipped because of an unmet condition check (ConditionUser=!@system).
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit UNIT has finished successfully.
░░ 
░░ The job identifier is 4.
Jan 06 13:38:58 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[4483]: Started systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit UNIT has finished successfully.
░░ 
░░ The job identifier is 6.
Jan 06 13:38:58 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[4483]: Reached target paths.target - Paths.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit UNIT has finished successfully.
░░ 
░░ The job identifier is 12.
Jan 06 13:38:58 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[4483]: Reached target timers.target - Timers.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit UNIT has finished successfully.
░░ 
░░ The job identifier is 3.
Jan 06 13:38:58 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[4483]: Starting dbus.socket - D-Bus User Message Bus Socket...
░░ Subject: A start job for unit UNIT has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit UNIT has begun execution.
░░ 
░░ The job identifier is 8.
Jan 06 13:38:58 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[4483]: Starting systemd-tmpfiles-setup.service - Create User Files and Directories...
░░ Subject: A start job for unit UNIT has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit UNIT has begun execution.
░░ 
░░ The job identifier is 11.
Jan 06 13:38:58 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[4483]: Finished systemd-tmpfiles-setup.service - Create User Files and Directories.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit UNIT has finished successfully.
░░ 
░░ The job identifier is 11.
Jan 06 13:38:58 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[4483]: Listening on dbus.socket - D-Bus User Message Bus Socket.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit UNIT has finished successfully.
░░ 
░░ The job identifier is 8.
Jan 06 13:38:58 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[4483]: Reached target sockets.target - Sockets.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit UNIT has finished successfully.
░░ 
░░ The job identifier is 7.
Jan 06 13:38:58 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[4483]: Reached target basic.target - Basic System.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit UNIT has finished successfully.
░░ 
░░ The job identifier is 2.
Jan 06 13:38:58 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[4483]: Reached target default.target - Main User Target.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit UNIT has finished successfully.
░░ 
░░ The job identifier is 1.
Jan 06 13:38:58 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[4483]: Startup finished in 108ms.
░░ Subject: User manager start-up is now complete
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The user manager instance for user 0 has been started. All services queued
░░ for starting have been started. Note that other services might still be starting
░░ up or be started at any later time.
░░ 
░░ Startup of the manager took 108437 microseconds.
Jan 06 13:38:58 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: Started user@0.service - User Manager for UID 0.
░░ Subject: A start job for unit user@0.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit user@0.service has finished successfully.
░░ 
░░ The job identifier is 686.
Jan 06 13:38:58 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: Started session-1.scope - Session 1 of User root.
░░ Subject: A start job for unit session-1.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit session-1.scope has finished successfully.
░░ 
░░ The job identifier is 767.
Jan 06 13:38:58 ip-10-31-45-81.us-east-1.aws.redhat.com sshd-session[4478]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0)
Jan 06 13:38:58 ip-10-31-45-81.us-east-1.aws.redhat.com sshd-session[4494]: Received disconnect from 10.30.33.24 port 37600:11: disconnected by user
Jan 06 13:38:58 ip-10-31-45-81.us-east-1.aws.redhat.com sshd-session[4494]: Disconnected from user root 10.30.33.24 port 37600
Jan 06 13:38:58 ip-10-31-45-81.us-east-1.aws.redhat.com sshd-session[4478]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-4478) opened.
Jan 06 13:38:58 ip-10-31-45-81.us-east-1.aws.redhat.com sshd-session[4478]: pam_unix(sshd:session): session closed for user root
Jan 06 13:38:58 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: session-1.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit session-1.scope has successfully entered the 'dead' state.
Jan 06 13:38:58 ip-10-31-45-81.us-east-1.aws.redhat.com systemd-logind[647]: Session 1 logged out. Waiting for processes to exit.
Jan 06 13:38:58 ip-10-31-45-81.us-east-1.aws.redhat.com systemd-logind[647]: Removed session 1.
░░ Subject: Session 1 has been terminated
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ Documentation: sd-login(3)
░░ 
░░ A session with the ID 1 has been terminated.
Jan 06 13:39:01 ip-10-31-45-81.us-east-1.aws.redhat.com sshd-session[4530]: Accepted publickey for root from 10.31.8.129 port 35478 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU
Jan 06 13:39:01 ip-10-31-45-81.us-east-1.aws.redhat.com sshd-session[4530]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-4530) opened.
Jan 06 13:39:01 ip-10-31-45-81.us-east-1.aws.redhat.com sshd-session[4531]: Accepted publickey for root from 10.31.8.129 port 35480 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU
Jan 06 13:39:01 ip-10-31-45-81.us-east-1.aws.redhat.com systemd-logind[647]: New session 3 of user root.
░░ Subject: A new session 3 has been created for user root
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ Documentation: sd-login(3)
░░ 
░░ A new session with the ID 3 has been created for the user root.
░░ 
░░ The leading process of the session is 4530.
Jan 06 13:39:01 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: Started session-3.scope - Session 3 of User root.
░░ Subject: A start job for unit session-3.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit session-3.scope has finished successfully.
░░ 
░░ The job identifier is 849.
Jan 06 13:39:01 ip-10-31-45-81.us-east-1.aws.redhat.com sshd-session[4531]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-4531) opened.
Jan 06 13:39:01 ip-10-31-45-81.us-east-1.aws.redhat.com sshd-session[4530]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0)
Jan 06 13:39:01 ip-10-31-45-81.us-east-1.aws.redhat.com systemd-logind[647]: New session 4 of user root.
░░ Subject: A new session 4 has been created for user root
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ Documentation: sd-login(3)
░░ 
░░ A new session with the ID 4 has been created for the user root.
░░ 
░░ The leading process of the session is 4531.
Jan 06 13:39:01 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: Started session-4.scope - Session 4 of User root.
░░ Subject: A start job for unit session-4.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit session-4.scope has finished successfully.
░░ 
░░ The job identifier is 931.
Jan 06 13:39:01 ip-10-31-45-81.us-east-1.aws.redhat.com sshd-session[4531]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0)
Jan 06 13:39:01 ip-10-31-45-81.us-east-1.aws.redhat.com sshd-session[4537]: Received disconnect from 10.31.8.129 port 35480:11: disconnected by user
Jan 06 13:39:01 ip-10-31-45-81.us-east-1.aws.redhat.com sshd-session[4537]: Disconnected from user root 10.31.8.129 port 35480
Jan 06 13:39:01 ip-10-31-45-81.us-east-1.aws.redhat.com sshd-session[4531]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-4531) opened.
Jan 06 13:39:01 ip-10-31-45-81.us-east-1.aws.redhat.com sshd-session[4531]: pam_unix(sshd:session): session closed for user root
Jan 06 13:39:01 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: session-4.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit session-4.scope has successfully entered the 'dead' state.
Jan 06 13:39:01 ip-10-31-45-81.us-east-1.aws.redhat.com systemd-logind[647]: Session 4 logged out. Waiting for processes to exit.
Jan 06 13:39:01 ip-10-31-45-81.us-east-1.aws.redhat.com systemd-logind[647]: Removed session 4.
░░ Subject: Session 4 has been terminated
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ Documentation: sd-login(3)
░░ 
░░ A session with the ID 4 has been terminated.
Jan 06 13:39:10 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: Starting systemd-hostnamed.service - Hostname Service...
░░ Subject: A start job for unit systemd-hostnamed.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-hostnamed.service has begun execution.
░░ 
░░ The job identifier is 1013.
Jan 06 13:39:10 ip-10-31-45-81.us-east-1.aws.redhat.com systemd[1]: Started systemd-hostnamed.service - Hostname Service.
░░ Subject: A start job for unit systemd-hostnamed.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-hostnamed.service has finished successfully.
░░ 
░░ The job identifier is 1013.
Jan 06 13:39:10 managed-node3 systemd-hostnamed[5985]: Hostname set to <managed-node3> (static)
Jan 06 13:39:10 managed-node3 NetworkManager[778]: <info>  [1736188750.9348] hostname: static hostname changed from "ip-10-31-45-81.us-east-1.aws.redhat.com" to "managed-node3"
Jan 06 13:39:10 managed-node3 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service...
░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit NetworkManager-dispatcher.service has begun execution.
░░ 
░░ The job identifier is 1091.
Jan 06 13:39:10 managed-node3 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service.
░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit NetworkManager-dispatcher.service has finished successfully.
░░ 
░░ The job identifier is 1091.
Jan 06 13:39:20 managed-node3 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.
Jan 06 13:39:40 managed-node3 systemd[1]: systemd-hostnamed.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit systemd-hostnamed.service has successfully entered the 'dead' state.
Jan 06 13:39:51 managed-node3 sshd-session[6646]: Accepted publickey for root from 10.31.42.234 port 35324 ssh2: RSA SHA256:9j1blwt3wcrRiGYZQ7ZGu9axm3cDklH6/z4c+Ee8CzE
Jan 06 13:39:51 managed-node3 sshd-session[6646]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-6646) opened.
Jan 06 13:39:51 managed-node3 systemd-logind[647]: New session 5 of user root.
░░ Subject: A new session 5 has been created for user root
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ Documentation: sd-login(3)
░░ 
░░ A new session with the ID 5 has been created for the user root.
░░ 
░░ The leading process of the session is 6646.
Jan 06 13:39:51 managed-node3 systemd[1]: Started session-5.scope - Session 5 of User root.
░░ Subject: A start job for unit session-5.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit session-5.scope has finished successfully.
░░ 
░░ The job identifier is 1170.
Jan 06 13:39:51 managed-node3 sshd-session[6646]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0)
Jan 06 13:39:53 managed-node3 python3.12[6795]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Jan 06 13:39:54 managed-node3 python3.12[6949]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:39:54 managed-node3 python3.12[7074]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:39:56 managed-node3 sudo[7324]:     root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tgufyqffannoscxzoempumdwpcxiwhhm ; /usr/bin/python3.12 /root/.ansible/tmp/ansible-tmp-1736188796.3001418-7229-84572163885533/AnsiballZ_dnf.py'
Jan 06 13:39:56 managed-node3 sudo[7324]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-7324) opened.
Jan 06 13:39:56 managed-node3 sudo[7324]: pam_unix(sudo:session): session opened for user root(uid=0) by root(uid=0)
Jan 06 13:39:56 managed-node3 python3.12[7327]: ansible-ansible.legacy.dnf Invoked with name=['iptables-nft', 'podman', 'shadow-utils-subid'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
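[Editor's illustration, not from the captured journal: the logged dnf task above ensures iptables-nft, podman, and shadow-utils-subid are present on the managed node. A minimal Python sketch of a standalone spot-check of the same packages via rpm; the package names are taken from the logged task, everything else is hypothetical:]

    import subprocess

    # Confirm each package the role installs is present on the host.
    for pkg in ("iptables-nft", "podman", "shadow-utils-subid"):
        result = subprocess.run(["rpm", "-q", pkg], capture_output=True, text=True)
        status = "installed" if result.returncode == 0 else "missing"
        print(f"{pkg}: {status}")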
Jan 06 13:40:17 managed-node3 kernel: SELinux:  Converting 382 SID table entries...
Jan 06 13:40:17 managed-node3 kernel: SELinux:  policy capability network_peer_controls=1
Jan 06 13:40:17 managed-node3 kernel: SELinux:  policy capability open_perms=1
Jan 06 13:40:17 managed-node3 kernel: SELinux:  policy capability extended_socket_class=1
Jan 06 13:40:17 managed-node3 kernel: SELinux:  policy capability always_check_network=0
Jan 06 13:40:17 managed-node3 kernel: SELinux:  policy capability cgroup_seclabel=1
Jan 06 13:40:17 managed-node3 kernel: SELinux:  policy capability nnp_nosuid_transition=1
Jan 06 13:40:17 managed-node3 kernel: SELinux:  policy capability genfs_seclabel_symlinks=1
Jan 06 13:40:17 managed-node3 kernel: SELinux:  policy capability ioctl_skip_cloexec=0
Jan 06 13:40:17 managed-node3 kernel: SELinux:  policy capability userspace_initial_context=0
Jan 06 13:40:24 managed-node3 kernel: SELinux:  Converting 383 SID table entries...
Jan 06 13:40:24 managed-node3 kernel: SELinux:  policy capability network_peer_controls=1
Jan 06 13:40:24 managed-node3 kernel: SELinux:  policy capability open_perms=1
Jan 06 13:40:24 managed-node3 kernel: SELinux:  policy capability extended_socket_class=1
Jan 06 13:40:24 managed-node3 kernel: SELinux:  policy capability always_check_network=0
Jan 06 13:40:24 managed-node3 kernel: SELinux:  policy capability cgroup_seclabel=1
Jan 06 13:40:24 managed-node3 kernel: SELinux:  policy capability nnp_nosuid_transition=1
Jan 06 13:40:24 managed-node3 kernel: SELinux:  policy capability genfs_seclabel_symlinks=1
Jan 06 13:40:24 managed-node3 kernel: SELinux:  policy capability ioctl_skip_cloexec=0
Jan 06 13:40:24 managed-node3 kernel: SELinux:  policy capability userspace_initial_context=0
Jan 06 13:40:31 managed-node3 kernel: SELinux:  Converting 383 SID table entries...
Jan 06 13:40:31 managed-node3 kernel: SELinux:  policy capability network_peer_controls=1
Jan 06 13:40:31 managed-node3 kernel: SELinux:  policy capability open_perms=1
Jan 06 13:40:31 managed-node3 kernel: SELinux:  policy capability extended_socket_class=1
Jan 06 13:40:31 managed-node3 kernel: SELinux:  policy capability always_check_network=0
Jan 06 13:40:31 managed-node3 kernel: SELinux:  policy capability cgroup_seclabel=1
Jan 06 13:40:31 managed-node3 kernel: SELinux:  policy capability nnp_nosuid_transition=1
Jan 06 13:40:31 managed-node3 kernel: SELinux:  policy capability genfs_seclabel_symlinks=1
Jan 06 13:40:31 managed-node3 kernel: SELinux:  policy capability ioctl_skip_cloexec=0
Jan 06 13:40:31 managed-node3 kernel: SELinux:  policy capability userspace_initial_context=0
Jan 06 13:40:32 managed-node3 setsebool[7405]: The virt_use_nfs policy boolean was changed to 1 by root
Jan 06 13:40:32 managed-node3 setsebool[7405]: The virt_sandbox_use_all_caps policy boolean was changed to 1 by root
Jan 06 13:40:40 managed-node3 kernel: SELinux:  Converting 387 SID table entries...
Jan 06 13:40:40 managed-node3 kernel: SELinux:  policy capability network_peer_controls=1
Jan 06 13:40:40 managed-node3 kernel: SELinux:  policy capability open_perms=1
Jan 06 13:40:40 managed-node3 kernel: SELinux:  policy capability extended_socket_class=1
Jan 06 13:40:40 managed-node3 kernel: SELinux:  policy capability always_check_network=0
Jan 06 13:40:40 managed-node3 kernel: SELinux:  policy capability cgroup_seclabel=1
Jan 06 13:40:40 managed-node3 kernel: SELinux:  policy capability nnp_nosuid_transition=1
Jan 06 13:40:40 managed-node3 kernel: SELinux:  policy capability genfs_seclabel_symlinks=1
Jan 06 13:40:40 managed-node3 kernel: SELinux:  policy capability ioctl_skip_cloexec=0
Jan 06 13:40:40 managed-node3 kernel: SELinux:  policy capability userspace_initial_context=0
Jan 06 13:40:48 managed-node3 kernel: SELinux:  Converting 387 SID table entries...
Jan 06 13:40:48 managed-node3 kernel: SELinux:  policy capability network_peer_controls=1
Jan 06 13:40:48 managed-node3 kernel: SELinux:  policy capability open_perms=1
Jan 06 13:40:48 managed-node3 kernel: SELinux:  policy capability extended_socket_class=1
Jan 06 13:40:48 managed-node3 kernel: SELinux:  policy capability always_check_network=0
Jan 06 13:40:48 managed-node3 kernel: SELinux:  policy capability cgroup_seclabel=1
Jan 06 13:40:48 managed-node3 kernel: SELinux:  policy capability nnp_nosuid_transition=1
Jan 06 13:40:48 managed-node3 kernel: SELinux:  policy capability genfs_seclabel_symlinks=1
Jan 06 13:40:48 managed-node3 kernel: SELinux:  policy capability ioctl_skip_cloexec=0
Jan 06 13:40:48 managed-node3 kernel: SELinux:  policy capability userspace_initial_context=0
Jan 06 13:40:48 managed-node3 groupadd[7452]: group added to /etc/group: name=polkitd, GID=114
Jan 06 13:40:48 managed-node3 groupadd[7452]: group added to /etc/gshadow: name=polkitd
Jan 06 13:40:48 managed-node3 groupadd[7452]: new group: name=polkitd, GID=114
Jan 06 13:40:48 managed-node3 useradd[7455]: new user: name=polkitd, UID=114, GID=114, home=/, shell=/sbin/nologin, from=none
Jan 06 13:40:48 managed-node3 dbus-broker-launch[632]: Noticed file-system modification, trigger reload.
░░ Subject: A configuration directory was written to
░░ Defined-By: dbus-broker
░░ Support: https://groups.google.com/forum/#!forum/bus1-devel
░░ 
░░ A write was detected to one of the directories containing D-Bus configuration
░░ files, triggering a configuration reload.
░░ 
░░ This functionality exists for backwards compatibility to pick up changes to
░░ D-Bus configuration without an explicit reload request. Typically when
░░ installing or removing third-party software causes D-Bus configuration files
░░ to be added or removed.
░░ 
░░ It is worth noting that this may cause partial configuration to be loaded in
░░ case dispatching this notification races with the writing of the configuration
░░ files. However, a future notification will then cause the configuration to be
░░ reloaded again.
Jan 06 13:40:48 managed-node3 dbus-broker-launch[632]: Noticed file-system modification, trigger reload.
░░ Subject: A configuration directory was written to
░░ Defined-By: dbus-broker
░░ Support: https://groups.google.com/forum/#!forum/bus1-devel
░░ 
░░ A write was detected to one of the directories containing D-Bus configuration
░░ files, triggering a configuration reload.
░░ 
░░ This functionality exists for backwards compatibility to pick up changes to
░░ D-Bus configuration without an explicit reload request. Typically when
░░ installing or removing third-party software causes D-Bus configuration files
░░ to be added or removed.
░░ 
░░ It is worth noting that this may cause partial configuration to be loaded in
░░ case dispatching this notification races with the writing of the configuration
░░ files. However, a future notification will then cause the configuration to be
░░ reloaded again.
Jan 06 13:40:48 managed-node3 systemd[1]: Listening on pcscd.socket - PC/SC Smart Card Daemon Activation Socket.
░░ Subject: A start job for unit pcscd.socket has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit pcscd.socket has finished successfully.
░░ 
░░ The job identifier is 1254.
Jan 06 13:41:05 managed-node3 systemd[1]: Started run-r06deb55605e943df902b2ba060adfef1.service - /usr/bin/systemctl start man-db-cache-update.
░░ Subject: A start job for unit run-r06deb55605e943df902b2ba060adfef1.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit run-r06deb55605e943df902b2ba060adfef1.service has finished successfully.
░░ 
░░ The job identifier is 1332.
Jan 06 13:41:05 managed-node3 systemctl[8147]: Warning: The unit file, source configuration file or drop-ins of man-db-cache-update.service changed on disk. Run 'systemctl daemon-reload' to reload units.
Jan 06 13:41:05 managed-node3 systemd[1]: Starting man-db-cache-update.service...
░░ Subject: A start job for unit man-db-cache-update.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit man-db-cache-update.service has begun execution.
░░ 
░░ The job identifier is 1410.
Jan 06 13:41:05 managed-node3 systemd[1]: Reload requested from client PID 8150 ('systemctl') (unit session-5.scope)...
Jan 06 13:41:05 managed-node3 systemd[1]: Reloading...
Jan 06 13:41:05 managed-node3 systemd[1]: Reloading finished in 254 ms.
Jan 06 13:41:05 managed-node3 systemd[1]: Queuing reload/restart jobs for marked units…
Jan 06 13:41:06 managed-node3 sudo[7324]: pam_unix(sudo:session): session closed for user root
Jan 06 13:41:07 managed-node3 python3.12[8691]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 06 13:41:07 managed-node3 systemd[1]: man-db-cache-update.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit man-db-cache-update.service has successfully entered the 'dead' state.
Jan 06 13:41:07 managed-node3 systemd[1]: Finished man-db-cache-update.service.
░░ Subject: A start job for unit man-db-cache-update.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit man-db-cache-update.service has finished successfully.
░░ 
░░ The job identifier is 1410.
Jan 06 13:41:08 managed-node3 python3.12[8829]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None
Jan 06 13:41:08 managed-node3 systemd[1]: run-r06deb55605e943df902b2ba060adfef1.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit run-r06deb55605e943df902b2ba060adfef1.service has successfully entered the 'dead' state.
Jan 06 13:41:08 managed-node3 python3.12[8964]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:41:10 managed-node3 python3.12[9097]: ansible-tempfile Invoked with state=directory prefix=lsr_podman_config_ suffix= path=None
Jan 06 13:41:10 managed-node3 python3.12[9228]: ansible-ansible.legacy.command Invoked with _raw_params=tar --ignore-failed-read -c -P -v -p -f /tmp/lsr_podman_config_iipwthxc/backup.tar /etc/containers/containers.conf.d/50-systemroles.conf /etc/containers/registries.conf.d/50-systemroles.conf /etc/containers/storage.conf /etc/containers/policy.json _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 06 13:41:11 managed-node3 python3.12[9360]: ansible-user Invoked with name=user1 state=present non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node3 update_password=always uid=None group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None
Jan 06 13:41:11 managed-node3 useradd[9362]: new group: name=user1, GID=1000
Jan 06 13:41:11 managed-node3 useradd[9362]: new user: name=user1, UID=1000, GID=1000, home=/home/user1, shell=/bin/bash, from=/dev/pts/0
Jan 06 13:41:12 managed-node3 python3.12[9624]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 06 13:41:13 managed-node3 python3.12[9761]: ansible-getent Invoked with database=passwd key=user1 fail_key=False service=None split=None
Jan 06 13:41:13 managed-node3 python3.12[9893]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:41:14 managed-node3 python3.12[10026]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 06 13:41:14 managed-node3 python3.12[10158]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 06 13:41:15 managed-node3 python3.12[10290]: ansible-file Invoked with path=/home/user1/.config/containers/containers.conf.d state=directory owner=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:41:15 managed-node3 python3.12[10421]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jan 06 13:41:16 managed-node3 python3.12[10526]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736188875.5066304-8128-221502857937955/.source.conf dest=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf owner=user1 mode=0644 follow=False _original_basename=toml.j2 checksum=94370d6e765779f1c58daf02f667b8f0b74d91f6 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:41:16 managed-node3 python3.12[10657]: ansible-file Invoked with path=/home/user1/.config/containers/registries.conf.d state=directory owner=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:41:16 managed-node3 python3.12[10788]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jan 06 13:41:17 managed-node3 python3.12[10893]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736188876.7288487-8159-229924587883718/.source.conf dest=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf owner=user1 mode=0644 follow=False _original_basename=toml.j2 checksum=dfb9cd7094a81b3d1bb06512cc9b49a09c75639b backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:41:17 managed-node3 python3.12[11024]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:41:18 managed-node3 python3.12[11155]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jan 06 13:41:18 managed-node3 python3.12[11260]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736188877.837156-8190-120872794415642/.source.conf dest=/home/user1/.config/containers/storage.conf owner=user1 mode=0644 follow=False _original_basename=toml.j2 checksum=d08574b6a1df63dbe1c939ff0bcc7c0b61d03044 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:41:18 managed-node3 python3.12[11391]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:41:19 managed-node3 python3.12[11522]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:41:19 managed-node3 python3.12[11653]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jan 06 13:41:19 managed-node3 python3.12[11758]: ansible-ansible.legacy.copy Invoked with dest=/home/user1/.config/containers/policy.json owner=user1 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1736188879.3449028-8230-40881165979046/.source.json _original_basename=.97ioajxe follow=False checksum=6746c079ad563b735fc39f73d4876654b80b0a0d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:41:20 managed-node3 python3.12[11889]: ansible-stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:41:20 managed-node3 python3.12[12022]: ansible-stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:41:21 managed-node3 python3.12[12155]: ansible-stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:41:21 managed-node3 python3.12[12288]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:41:23 managed-node3 python3.12[12552]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 06 13:41:24 managed-node3 python3.12[12689]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:41:25 managed-node3 python3.12[12822]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 06 13:41:25 managed-node3 python3.12[12954]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 06 13:41:26 managed-node3 python3.12[13086]: ansible-file Invoked with path=/home/user1/.config/containers/containers.conf.d state=directory owner=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:41:27 managed-node3 python3.12[13217]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jan 06 13:41:27 managed-node3 python3.12[13283]: ansible-ansible.legacy.file Invoked with owner=user1 mode=0644 dest=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf _original_basename=toml.j2 recurse=False state=file path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:41:28 managed-node3 python3.12[13414]: ansible-file Invoked with path=/home/user1/.config/containers/registries.conf.d state=directory owner=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:41:28 managed-node3 python3.12[13545]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jan 06 13:41:28 managed-node3 python3.12[13611]: ansible-ansible.legacy.file Invoked with owner=user1 mode=0644 dest=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf _original_basename=toml.j2 recurse=False state=file path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:41:29 managed-node3 python3.12[13742]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:41:29 managed-node3 python3.12[13873]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jan 06 13:41:30 managed-node3 python3.12[13939]: ansible-ansible.legacy.file Invoked with owner=user1 mode=0644 dest=/home/user1/.config/containers/storage.conf _original_basename=toml.j2 recurse=False state=file path=/home/user1/.config/containers/storage.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:41:30 managed-node3 python3.12[14070]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:41:31 managed-node3 python3.12[14201]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:41:31 managed-node3 python3.12[14334]: ansible-slurp Invoked with path=/home/user1/.config/containers/policy.json src=/home/user1/.config/containers/policy.json
Jan 06 13:41:32 managed-node3 python3.12[14465]: ansible-stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:41:32 managed-node3 python3.12[14598]: ansible-stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:41:33 managed-node3 python3.12[14731]: ansible-stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:41:33 managed-node3 python3.12[14864]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:41:35 managed-node3 python3.12[15128]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 06 13:41:36 managed-node3 python3.12[15266]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None
Jan 06 13:41:36 managed-node3 python3.12[15398]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:41:37 managed-node3 python3.12[15531]: ansible-file Invoked with path=/etc/containers/containers.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:41:37 managed-node3 python3.12[15662]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jan 06 13:41:38 managed-node3 python3.12[15767]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736188897.667124-8973-68423769199211/.source.conf dest=/etc/containers/containers.conf.d/50-systemroles.conf owner=root mode=0644 follow=False _original_basename=toml.j2 checksum=94370d6e765779f1c58daf02f667b8f0b74d91f6 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:41:38 managed-node3 python3.12[15898]: ansible-file Invoked with path=/etc/containers/registries.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:41:39 managed-node3 python3.12[16029]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jan 06 13:41:39 managed-node3 python3.12[16134]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736188898.8826015-9028-127185134774285/.source.conf dest=/etc/containers/registries.conf.d/50-systemroles.conf owner=root mode=0644 follow=False _original_basename=toml.j2 checksum=dfb9cd7094a81b3d1bb06512cc9b49a09c75639b backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:41:40 managed-node3 python3.12[16265]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:41:40 managed-node3 python3.12[16396]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jan 06 13:41:40 managed-node3 python3.12[16501]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736188900.245033-9087-106320565065434/.source.conf dest=/etc/containers/storage.conf owner=root mode=0644 follow=False _original_basename=toml.j2 checksum=d08574b6a1df63dbe1c939ff0bcc7c0b61d03044 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:41:41 managed-node3 python3.12[16632]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:41:41 managed-node3 python3.12[16763]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:41:42 managed-node3 python3.12[16896]: ansible-slurp Invoked with path=/etc/containers/policy.json src=/etc/containers/policy.json
Jan 06 13:41:42 managed-node3 python3.12[17027]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jan 06 13:41:42 managed-node3 python3.12[17134]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/policy.json owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1736188902.1928573-9177-42035263478033/.source.json _original_basename=.vjw72ncq follow=False checksum=6746c079ad563b735fc39f73d4876654b80b0a0d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:41:43 managed-node3 python3.12[17265]: ansible-stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:41:43 managed-node3 python3.12[17398]: ansible-stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:41:44 managed-node3 python3.12[17531]: ansible-stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:41:44 managed-node3 python3.12[17664]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:41:46 managed-node3 python3.12[17928]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 06 13:41:48 managed-node3 python3.12[18066]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:41:49 managed-node3 python3.12[18199]: ansible-file Invoked with path=/etc/containers/containers.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:41:49 managed-node3 python3.12[18330]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jan 06 13:41:49 managed-node3 python3.12[18396]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/containers.conf.d/50-systemroles.conf _original_basename=toml.j2 recurse=False state=file path=/etc/containers/containers.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:41:50 managed-node3 python3.12[18527]: ansible-file Invoked with path=/etc/containers/registries.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:41:50 managed-node3 python3.12[18658]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jan 06 13:41:50 managed-node3 python3.12[18724]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/registries.conf.d/50-systemroles.conf _original_basename=toml.j2 recurse=False state=file path=/etc/containers/registries.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:41:51 managed-node3 python3.12[18855]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:41:51 managed-node3 python3.12[18986]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jan 06 13:41:51 managed-node3 python3.12[19052]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/storage.conf _original_basename=toml.j2 recurse=False state=file path=/etc/containers/storage.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:41:52 managed-node3 python3.12[19183]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:41:52 managed-node3 python3.12[19314]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:41:53 managed-node3 python3.12[19447]: ansible-slurp Invoked with path=/etc/containers/policy.json src=/etc/containers/policy.json
Jan 06 13:41:54 managed-node3 python3.12[19578]: ansible-stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:41:54 managed-node3 python3.12[19711]: ansible-stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:41:55 managed-node3 python3.12[19844]: ansible-stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:41:55 managed-node3 python3.12[19977]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:41:56 managed-node3 python3.12[20110]: ansible-slurp Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf src=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf
Jan 06 13:41:56 managed-node3 python3.12[20241]: ansible-slurp Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf src=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf
Jan 06 13:41:57 managed-node3 python3.12[20372]: ansible-slurp Invoked with path=/home/user1/.config/containers/storage.conf src=/home/user1/.config/containers/storage.conf
Jan 06 13:41:57 managed-node3 python3.12[20503]: ansible-slurp Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf src=/etc/containers/containers.conf.d/50-systemroles.conf
Jan 06 13:41:58 managed-node3 python3.12[20634]: ansible-slurp Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf src=/etc/containers/registries.conf.d/50-systemroles.conf
Jan 06 13:41:58 managed-node3 python3.12[20765]: ansible-slurp Invoked with path=/etc/containers/storage.conf src=/etc/containers/storage.conf
Jan 06 13:41:59 managed-node3 python3.12[20896]: ansible-file Invoked with state=absent path=/etc/containers/containers.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:41:59 managed-node3 python3.12[21027]: ansible-file Invoked with state=absent path=/etc/containers/registries.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:42:00 managed-node3 python3.12[21158]: ansible-file Invoked with state=absent path=/etc/containers/storage.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:42:00 managed-node3 python3.12[21289]: ansible-file Invoked with state=absent path=/etc/containers/policy.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:42:00 managed-node3 python3.12[21420]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:42:01 managed-node3 python3.12[21551]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:42:01 managed-node3 python3.12[21682]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/storage.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:42:01 managed-node3 python3.12[21813]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/policy.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:42:02 managed-node3 python3.12[21944]: ansible-ansible.legacy.command Invoked with _raw_params=tar xfvpP /tmp/lsr_podman_config_iipwthxc/backup.tar _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 06 13:42:02 managed-node3 python3.12[22076]: ansible-file Invoked with state=absent path=/tmp/lsr_podman_config_iipwthxc recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:42:05 managed-node3 python3.12[22250]: ansible-setup Invoked with gather_subset=['!all', '!min', 'distribution', 'distribution_major_version', 'distribution_version', 'os_family'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Jan 06 13:42:05 managed-node3 python3.12[22383]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:42:06 managed-node3 python3.12[22514]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:42:08 managed-node3 python3.12[22776]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 06 13:42:09 managed-node3 python3.12[22914]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None
Jan 06 13:42:09 managed-node3 python3.12[23046]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:42:13 managed-node3 python3.12[23222]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Jan 06 13:42:18 managed-node3 python3.12[23382]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:42:18 managed-node3 python3.12[23513]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:42:21 managed-node3 python3.12[23775]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 06 13:42:22 managed-node3 python3.12[23912]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None
Jan 06 13:42:22 managed-node3 python3.12[24044]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:42:30 managed-node3 python3.12[24220]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Jan 06 13:42:31 managed-node3 python3.12[24380]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:42:32 managed-node3 python3.12[24511]: ansible-ansible.legacy.dnf Invoked with name=['python3-pyasn1', 'python3-cryptography', 'python3-dbus'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Jan 06 13:42:34 managed-node3 python3.12[24647]: ansible-ansible.legacy.dnf Invoked with name=['certmonger', 'python3-packaging'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Jan 06 13:42:36 managed-node3 dbus-broker-launch[632]: Noticed file-system modification, trigger reload.
░░ Subject: A configuration directory was written to
░░ Defined-By: dbus-broker
░░ Support: https://groups.google.com/forum/#!forum/bus1-devel
░░ 
░░ A write was detected to one of the directories containing D-Bus configuration
░░ files, triggering a configuration reload.
░░ 
░░ This functionality exists for backwards compatibility to pick up changes to
░░ D-Bus configuration without an explicit reload request. Typically, installing
░░ or removing third-party software causes D-Bus configuration files to be added
░░ or removed.
░░ 
░░ It is worth noting that this may cause partial configuration to be loaded in
░░ case dispatching this notification races with the writing of the configuration
░░ files. However, a future notification will then cause the configuration to be
░░ reloaded again.
Jan 06 13:42:36 managed-node3 dbus-broker-launch[632]: Noticed file-system modification, trigger reload.
░░ Subject: A configuration directory was written to
░░ Defined-By: dbus-broker
░░ Support: https://groups.google.com/forum/#!forum/bus1-devel
░░ 
░░ A write was detected to one of the directories containing D-Bus configuration
░░ files, triggering a configuration reload.
░░ 
░░ This functionality exists for backwards compatibility to pick up changes to
░░ D-Bus configuration without an explicit reload request. Typically, installing
░░ or removing third-party software causes D-Bus configuration files to be added
░░ or removed.
░░ 
░░ It is worth noting that this may cause partial configuration to be loaded in
░░ case dispatching this notification races with the writing of the configuration
░░ files. However, a future notification will then cause the configuration to be
░░ reloaded again.
Jan 06 13:42:36 managed-node3 dbus-broker-launch[632]: Noticed file-system modification, trigger reload.
░░ Subject: A configuration directory was written to
░░ Defined-By: dbus-broker
░░ Support: https://groups.google.com/forum/#!forum/bus1-devel
░░ 
░░ A write was detected to one of the directories containing D-Bus configuration
░░ files, triggering a configuration reload.
░░ 
░░ This functionality exists for backwards compatibility to pick up changes to
░░ D-Bus configuration without an explicit reload request. Typically, installing
░░ or removing third-party software causes D-Bus configuration files to be added
░░ or removed.
░░ 
░░ It is worth noting that this may cause partial configuration to be loaded in
░░ case dispatching this notification races with the writing of the configuration
░░ files. However, a future notification will then cause the configuration to be
░░ reloaded again.
Jan 06 13:42:36 managed-node3 systemd[1]: Reload requested from client PID 24655 ('systemctl') (unit session-5.scope)...
Jan 06 13:42:36 managed-node3 systemd[1]: Reloading...
Jan 06 13:42:36 managed-node3 systemd[1]: Reloading finished in 176 ms.
Jan 06 13:42:36 managed-node3 systemd[1]: Started run-r4fb640519d9d49dfb9007e8465bc5731.service - /usr/bin/systemctl start man-db-cache-update.
░░ Subject: A start job for unit run-r4fb640519d9d49dfb9007e8465bc5731.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit run-r4fb640519d9d49dfb9007e8465bc5731.service has finished successfully.
░░ 
░░ The job identifier is 1492.
Jan 06 13:42:36 managed-node3 systemd[1]: Starting man-db-cache-update.service...
░░ Subject: A start job for unit man-db-cache-update.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit man-db-cache-update.service has begun execution.
░░ 
░░ The job identifier is 1570.
Jan 06 13:42:36 managed-node3 systemd[1]: Reload requested from client PID 24716 ('systemctl') (unit session-5.scope)...
Jan 06 13:42:36 managed-node3 systemd[1]: Reloading...
Jan 06 13:42:36 managed-node3 systemd[1]: Reloading finished in 279 ms.
Jan 06 13:42:36 managed-node3 systemd[1]: Starting logrotate.service - Rotate log files...
░░ Subject: A start job for unit logrotate.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit logrotate.service has begun execution.
░░ 
░░ The job identifier is 1648.
Jan 06 13:42:36 managed-node3 systemd[1]: Queuing reload/restart jobs for marked units…
Jan 06 13:42:36 managed-node3 systemd[1]: logrotate.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit logrotate.service has successfully entered the 'dead' state.
Jan 06 13:42:36 managed-node3 systemd[1]: Finished logrotate.service - Rotate log files.
░░ Subject: A start job for unit logrotate.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit logrotate.service has finished successfully.
░░ 
░░ The job identifier is 1648.
Jan 06 13:42:37 managed-node3 systemd[1]: man-db-cache-update.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit man-db-cache-update.service has successfully entered the 'dead' state.
Jan 06 13:42:37 managed-node3 systemd[1]: Finished man-db-cache-update.service.
░░ Subject: A start job for unit man-db-cache-update.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit man-db-cache-update.service has finished successfully.
░░ 
░░ The job identifier is 1570.
Jan 06 13:42:37 managed-node3 systemd[1]: run-r4fb640519d9d49dfb9007e8465bc5731.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit run-r4fb640519d9d49dfb9007e8465bc5731.service has successfully entered the 'dead' state.
Jan 06 13:42:37 managed-node3 python3.12[24911]: ansible-file Invoked with name=/etc/certmonger//pre-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//pre-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:42:38 managed-node3 python3.12[25042]: ansible-file Invoked with name=/etc/certmonger//post-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//post-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:42:38 managed-node3 python3.12[25173]: ansible-ansible.legacy.systemd Invoked with name=certmonger state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Jan 06 13:42:38 managed-node3 systemd[1]: Reload requested from client PID 25176 ('systemctl') (unit session-5.scope)...
Jan 06 13:42:38 managed-node3 systemd[1]: Reloading...
Jan 06 13:42:39 managed-node3 systemd[1]: Reloading finished in 181 ms.
Jan 06 13:42:39 managed-node3 systemd[1]: Starting certmonger.service - Certificate monitoring and PKI enrollment...
░░ Subject: A start job for unit certmonger.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit certmonger.service has begun execution.
░░ 
░░ The job identifier is 1726.
Jan 06 13:42:39 managed-node3 (rtmonger)[25230]: certmonger.service: Referenced but unset environment variable evaluates to an empty string: OPTS
Jan 06 13:42:39 managed-node3 systemd[1]: Started certmonger.service - Certificate monitoring and PKI enrollment.
░░ Subject: A start job for unit certmonger.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit certmonger.service has finished successfully.
░░ 
░░ The job identifier is 1726.
Jan 06 13:42:39 managed-node3 python3.12[25388]: ansible-fedora.linux_system_roles.certificate_request Invoked with name=quadlet_demo dns=['localhost'] directory=/etc/pki/tls wait=True ca=self-sign __header=#
                                                 # Ansible managed
                                                 #
                                                 # system_role:certificate
                                                  provider_config_directory=/etc/certmonger provider=certmonger key_usage=['digitalSignature', 'keyEncipherment'] extended_key_usage=['id-kp-serverAuth', 'id-kp-clientAuth'] auto_renew=True ip=None email=None common_name=None country=None state=None locality=None organization=None organizational_unit=None contact_email=None key_size=None owner=None group=None mode=None principal=None run_before=None run_after=None
Jan 06 13:42:39 managed-node3 certmonger[25230]: 2025-01-06 13:42:39 [25230] Wrote to /var/lib/certmonger/requests/20250106184239
Jan 06 13:42:39 managed-node3 certmonger[25230]: 2025-01-06 13:42:39 [25230] Wrote to /var/lib/certmonger/requests/20250106184239
Jan 06 13:42:39 managed-node3 certmonger[25230]: 2025-01-06 13:42:39 [25230] Wrote to /var/lib/certmonger/requests/20250106184239
Jan 06 13:42:39 managed-node3 certmonger[25230]: 2025-01-06 13:42:39 [25230] Wrote to /var/lib/certmonger/requests/20250106184239
Jan 06 13:42:39 managed-node3 certmonger[25230]: 2025-01-06 13:42:39 [25230] Wrote to /var/lib/certmonger/requests/20250106184239
Jan 06 13:42:39 managed-node3 certmonger[25230]: 2025-01-06 13:42:39 [25230] Wrote to /var/lib/certmonger/requests/20250106184239
Jan 06 13:42:39 managed-node3 certmonger[25230]: 2025-01-06 13:42:39 [25230] Wrote to /var/lib/certmonger/requests/20250106184239
Jan 06 13:42:39 managed-node3 certmonger[25230]: 2025-01-06 13:42:39 [25230] Wrote to /var/lib/certmonger/requests/20250106184239
Jan 06 13:42:39 managed-node3 certmonger[25230]: 2025-01-06 13:42:39 [25230] Wrote to /var/lib/certmonger/requests/20250106184239
Jan 06 13:42:39 managed-node3 certmonger[25230]: 2025-01-06 13:42:39 [25230] Wrote to /var/lib/certmonger/requests/20250106184239
Jan 06 13:42:39 managed-node3 certmonger[25230]: 2025-01-06 13:42:39 [25230] Wrote to /var/lib/certmonger/requests/20250106184239
Jan 06 13:42:39 managed-node3 certmonger[25230]: 2025-01-06 13:42:39 [25230] Wrote to /var/lib/certmonger/requests/20250106184239
Jan 06 13:42:39 managed-node3 certmonger[25230]: 2025-01-06 13:42:39 [25230] Wrote to /var/lib/certmonger/requests/20250106184239
Jan 06 13:42:39 managed-node3 certmonger[25230]: 2025-01-06 13:42:39 [25230] Wrote to /var/lib/certmonger/requests/20250106184239
Jan 06 13:42:39 managed-node3 certmonger[25230]: 2025-01-06 13:42:39 [25230] Wrote to /var/lib/certmonger/requests/20250106184239
Jan 06 13:42:39 managed-node3 certmonger[25230]: 2025-01-06 13:42:39 [25230] Wrote to /var/lib/certmonger/requests/20250106184239
Jan 06 13:42:39 managed-node3 certmonger[25230]: 2025-01-06 13:42:39 [25230] Wrote to /var/lib/certmonger/requests/20250106184239
Jan 06 13:42:39 managed-node3 certmonger[25230]: 2025-01-06 13:42:39 [25230] Wrote to /var/lib/certmonger/requests/20250106184239
Jan 06 13:42:39 managed-node3 certmonger[25230]: 2025-01-06 13:42:39 [25230] Wrote to /var/lib/certmonger/requests/20250106184239
Jan 06 13:42:39 managed-node3 certmonger[25230]: 2025-01-06 13:42:39 [25230] Wrote to /var/lib/certmonger/requests/20250106184239
Jan 06 13:42:39 managed-node3 certmonger[25230]: 2025-01-06 13:42:39 [25230] Wrote to /var/lib/certmonger/requests/20250106184239
Jan 06 13:42:39 managed-node3 certmonger[25230]: 2025-01-06 13:42:39 [25230] Wrote to /var/lib/certmonger/requests/20250106184239
Jan 06 13:42:39 managed-node3 certmonger[25230]: 2025-01-06 13:42:39 [25230] Wrote to /var/lib/certmonger/requests/20250106184239
Jan 06 13:42:40 managed-node3 certmonger[25230]: 2025-01-06 13:42:39 [25230] Wrote to /var/lib/certmonger/requests/20250106184239
Jan 06 13:42:40 managed-node3 certmonger[25230]: 2025-01-06 13:42:39 [25230] Wrote to /var/lib/certmonger/requests/20250106184239
Jan 06 13:42:40 managed-node3 certmonger[25230]: 2025-01-06 13:42:39 [25230] Wrote to /var/lib/certmonger/requests/20250106184239
Jan 06 13:42:40 managed-node3 certmonger[25230]: 2025-01-06 13:42:39 [25230] Wrote to /var/lib/certmonger/requests/20250106184239
Jan 06 13:42:40 managed-node3 certmonger[25230]: 2025-01-06 13:42:40 [25230] Wrote to /var/lib/certmonger/requests/20250106184239
Jan 06 13:42:40 managed-node3 certmonger[25230]: 2025-01-06 13:42:40 [25230] Wrote to /var/lib/certmonger/requests/20250106184239
Jan 06 13:42:40 managed-node3 certmonger[25403]: Certificate in file "/etc/pki/tls/certs/quadlet_demo.crt" issued by CA and saved.
Jan 06 13:42:40 managed-node3 certmonger[25230]: 2025-01-06 13:42:40 [25230] Wrote to /var/lib/certmonger/requests/20250106184239
Jan 06 13:42:40 managed-node3 python3.12[25534]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt
Jan 06 13:42:40 managed-node3 python3.12[25665]: ansible-slurp Invoked with path=/etc/pki/tls/private/quadlet_demo.key src=/etc/pki/tls/private/quadlet_demo.key
Jan 06 13:42:41 managed-node3 python3.12[25796]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt
Jan 06 13:42:41 managed-node3 python3.12[25927]: ansible-ansible.legacy.command Invoked with _raw_params=getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 06 13:42:42 managed-node3 certmonger[25230]: 2025-01-06 13:42:42 [25230] Wrote to /var/lib/certmonger/requests/20250106184239
Jan 06 13:42:42 managed-node3 python3.12[26059]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:42:42 managed-node3 python3.12[26190]: ansible-file Invoked with path=/etc/pki/tls/private/quadlet_demo.key state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:42:43 managed-node3 python3.12[26321]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:42:44 managed-node3 python3.12[26452]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:42:44 managed-node3 python3.12[26583]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:42:46 managed-node3 python3.12[26845]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 06 13:42:47 managed-node3 python3.12[26982]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None
Jan 06 13:42:48 managed-node3 python3.12[27114]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:42:51 managed-node3 python3.12[27247]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:42:51 managed-node3 python3.12[27378]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:42:52 managed-node3 python3.12[27509]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Jan 06 13:42:53 managed-node3 python3.12[27641]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None
Jan 06 13:42:54 managed-node3 python3.12[27774]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Jan 06 13:42:54 managed-node3 systemd[1]: Reload requested from client PID 27777 ('systemctl') (unit session-5.scope)...
Jan 06 13:42:54 managed-node3 systemd[1]: Reloading...
Jan 06 13:42:54 managed-node3 systemd[1]: Reloading finished in 180 ms.
Jan 06 13:42:54 managed-node3 systemd[1]: Starting firewalld.service - firewalld - dynamic firewall daemon...
░░ Subject: A start job for unit firewalld.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit firewalld.service has begun execution.
░░ 
░░ The job identifier is 1805.
Jan 06 13:42:54 managed-node3 systemd[1]: Started firewalld.service - firewalld - dynamic firewall daemon.
░░ Subject: A start job for unit firewalld.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit firewalld.service has finished successfully.
░░ 
░░ The job identifier is 1805.
Jan 06 13:42:54 managed-node3 kernel: Warning: Unmaintained driver is detected: ip_set
Jan 06 13:42:55 managed-node3 systemd[1]: Starting polkit.service - Authorization Manager...
░░ Subject: A start job for unit polkit.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit polkit.service has begun execution.
░░ 
░░ The job identifier is 1889.
Jan 06 13:42:55 managed-node3 polkitd[27943]: Started polkitd version 125
Jan 06 13:42:55 managed-node3 systemd[1]: Started polkit.service - Authorization Manager.
░░ Subject: A start job for unit polkit.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit polkit.service has finished successfully.
░░ 
░░ The job identifier is 1889.
Jan 06 13:42:55 managed-node3 python3.12[28016]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None
Jan 06 13:42:56 managed-node3 python3.12[28147]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None
Jan 06 13:42:56 managed-node3 rsyslogd[645]: imjournal: journal files changed, reloading...  [v8.2408.0-2.el10 try https://www.rsyslog.com/e/0 ]
Jan 06 13:42:59 managed-node3 systemd[1]: var-lib-containers-storage-overlay-compat3197202625-merged.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit var-lib-containers-storage-overlay-compat3197202625-merged.mount has successfully entered the 'dead' state.
Jan 06 13:42:59 managed-node3 kernel: evm: overlay not supported
Jan 06 13:42:59 managed-node3 systemd[1]: var-lib-containers-storage-overlay-metacopy\x2dcheck325459512-merged.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit var-lib-containers-storage-overlay-metacopy\x2dcheck325459512-merged.mount has successfully entered the 'dead' state.
Jan 06 13:42:59 managed-node3 podman[28287]: 2025-01-06 13:42:59.535006444 -0500 EST m=+0.073540181 system refresh
Jan 06 13:43:00 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jan 06 13:43:01 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jan 06 13:43:01 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jan 06 13:43:02 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jan 06 13:43:03 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jan 06 13:43:03 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jan 06 13:43:04 managed-node3 python3.12[28756]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:43:05 managed-node3 python3.12[28889]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:43:06 managed-node3 python3.12[29020]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.network follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jan 06 13:43:06 managed-node3 python3.12[29125]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736188985.913371-13096-237285315394433/.source.network dest=/etc/containers/systemd/quadlet-demo.network owner=root group=0 mode=0644 _original_basename=quadlet-demo.network follow=False checksum=e57c08d49aff4bae8daab138d913aeddaa8682a0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:43:07 managed-node3 python3.12[29256]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Jan 06 13:43:07 managed-node3 systemd[1]: Reload requested from client PID 29257 ('systemctl') (unit session-5.scope)...
Jan 06 13:43:07 managed-node3 systemd[1]: Reloading...
Jan 06 13:43:07 managed-node3 systemd[1]: Reloading finished in 187 ms.
Jan 06 13:43:07 managed-node3 python3.12[29441]: ansible-systemd Invoked with name=quadlet-demo-network.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None
Jan 06 13:43:08 managed-node3 systemd[1]: Starting quadlet-demo-network.service...
░░ Subject: A start job for unit quadlet-demo-network.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit quadlet-demo-network.service has begun execution.
░░ 
░░ The job identifier is 1969.
Jan 06 13:43:08 managed-node3 quadlet-demo-network[29445]: systemd-quadlet-demo
Jan 06 13:43:08 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jan 06 13:43:08 managed-node3 systemd[1]: Finished quadlet-demo-network.service.
░░ Subject: A start job for unit quadlet-demo-network.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit quadlet-demo-network.service has finished successfully.
░░ 
░░ The job identifier is 1969.
Jan 06 13:43:09 managed-node3 python3.12[29583]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:43:10 managed-node3 python3.12[29716]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:43:11 managed-node3 python3.12[29847]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.volume follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jan 06 13:43:11 managed-node3 python3.12[29952]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736188991.0438864-13301-179211539160969/.source.volume dest=/etc/containers/systemd/quadlet-demo-mysql.volume owner=root group=0 mode=0644 _original_basename=quadlet-demo-mysql.volume follow=False checksum=585f8cbdf0ec73000f9227dcffbef71e9552ea4a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:43:12 managed-node3 python3.12[30083]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Jan 06 13:43:12 managed-node3 systemd[1]: Reload requested from client PID 30084 ('systemctl') (unit session-5.scope)...
Jan 06 13:43:12 managed-node3 systemd[1]: Reloading...
Jan 06 13:43:12 managed-node3 systemd[1]: Reloading finished in 184 ms.
Jan 06 13:43:13 managed-node3 python3.12[30267]: ansible-systemd Invoked with name=quadlet-demo-mysql-volume.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None
Jan 06 13:43:13 managed-node3 systemd[1]: Starting quadlet-demo-mysql-volume.service...
░░ Subject: A start job for unit quadlet-demo-mysql-volume.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit quadlet-demo-mysql-volume.service has begun execution.
░░ 
░░ The job identifier is 2053.
Jan 06 13:43:13 managed-node3 podman[30271]: 2025-01-06 13:43:13.144066847 -0500 EST m=+0.027237563 volume create systemd-quadlet-demo-mysql
Jan 06 13:43:13 managed-node3 quadlet-demo-mysql-volume[30271]: systemd-quadlet-demo-mysql
Jan 06 13:43:13 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jan 06 13:43:13 managed-node3 systemd[1]: Finished quadlet-demo-mysql-volume.service.
░░ Subject: A start job for unit quadlet-demo-mysql-volume.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit quadlet-demo-mysql-volume.service has finished successfully.
░░ 
░░ The job identifier is 2053.
Jan 06 13:43:14 managed-node3 python3.12[30410]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:43:16 managed-node3 python3.12[30543]: ansible-file Invoked with path=/tmp/quadlet_demo state=directory owner=root group=root mode=0777 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:43:16 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jan 06 13:43:19 managed-node3 systemd[1]: var-lib-containers-storage-overlay-compat2232989401-lower\x2dmapped.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit var-lib-containers-storage-overlay-compat2232989401-lower\x2dmapped.mount has successfully entered the 'dead' state.
Jan 06 13:43:23 managed-node3 podman[30683]: 2025-01-06 13:43:23.26264495 -0500 EST m=+6.314460256 image pull dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6
Jan 06 13:43:23 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jan 06 13:43:23 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jan 06 13:43:23 managed-node3 python3.12[30996]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:43:24 managed-node3 python3.12[31127]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jan 06 13:43:24 managed-node3 python3.12[31232]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/systemd/quadlet-demo-mysql.container owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1736189003.926421-13821-120873509621603/.source.container _original_basename=.dusnlqgl follow=False checksum=ca62b2ad3cc9afb5b5371ebbf797b9bc4fd7edd4 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:43:25 managed-node3 python3.12[31363]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Jan 06 13:43:25 managed-node3 systemd[1]: Reload requested from client PID 31364 ('systemctl') (unit session-5.scope)...
Jan 06 13:43:25 managed-node3 systemd[1]: Reloading...
Jan 06 13:43:25 managed-node3 systemd[1]: Reloading finished in 187 ms.
Jan 06 13:43:25 managed-node3 python3.12[31547]: ansible-systemd Invoked with name=quadlet-demo-mysql.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None
Jan 06 13:43:25 managed-node3 systemd[1]: Starting quadlet-demo-mysql.service...
░░ Subject: A start job for unit quadlet-demo-mysql.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit quadlet-demo-mysql.service has begun execution.
░░ 
░░ The job identifier is 2137.
Jan 06 13:43:26 managed-node3 podman[31551]: 2025-01-06 13:43:26.016853204 -0500 EST m=+0.046274200 container create a9d3cc1040b1e5f0254254904e0fe071ddf92268fb2ab1167defc8fa41d200cf (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service)
Jan 06 13:43:26 managed-node3 kernel: bridge: filtering via arp/ip/ip6tables is no longer available by default. Update your scripts to load br_netfilter if you need this.
Jan 06 13:43:26 managed-node3 NetworkManager[778]: <info>  [1736189006.0655] manager: (podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/3)
Jan 06 13:43:26 managed-node3 kernel: podman1: port 1(veth0) entered blocking state
Jan 06 13:43:26 managed-node3 kernel: podman1: port 1(veth0) entered disabled state
Jan 06 13:43:26 managed-node3 kernel: veth0: entered allmulticast mode
Jan 06 13:43:26 managed-node3 kernel: veth0: entered promiscuous mode
Jan 06 13:43:26 managed-node3 NetworkManager[778]: <info>  [1736189006.0787] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/4)
Jan 06 13:43:26 managed-node3 kernel: podman1: port 1(veth0) entered blocking state
Jan 06 13:43:26 managed-node3 kernel: podman1: port 1(veth0) entered forwarding state
Jan 06 13:43:26 managed-node3 NetworkManager[778]: <info>  [1736189006.0809] device (veth0): carrier: link connected
Jan 06 13:43:26 managed-node3 NetworkManager[778]: <info>  [1736189006.0813] device (podman1): carrier: link connected
Jan 06 13:43:26 managed-node3 podman[31551]: 2025-01-06 13:43:25.997958077 -0500 EST m=+0.027379164 image pull dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6
Jan 06 13:43:26 managed-node3 (udev-worker)[31569]: Network interface NamePolicy= disabled on kernel command line.
Jan 06 13:43:26 managed-node3 (udev-worker)[31572]: Network interface NamePolicy= disabled on kernel command line.
Jan 06 13:43:26 managed-node3 NetworkManager[778]: <info>  [1736189006.1231] device (podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Jan 06 13:43:26 managed-node3 NetworkManager[778]: <info>  [1736189006.1241] device (podman1): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external')
Jan 06 13:43:26 managed-node3 NetworkManager[778]: <info>  [1736189006.1251] device (podman1): Activation: starting connection 'podman1' (718ded4d-571d-4ca8-9a99-200f88fc8406)
Jan 06 13:43:26 managed-node3 NetworkManager[778]: <info>  [1736189006.1253] device (podman1): state change: disconnected -> prepare (reason 'none', managed-type: 'external')
Jan 06 13:43:26 managed-node3 NetworkManager[778]: <info>  [1736189006.1259] device (podman1): state change: prepare -> config (reason 'none', managed-type: 'external')
Jan 06 13:43:26 managed-node3 NetworkManager[778]: <info>  [1736189006.1263] device (podman1): state change: config -> ip-config (reason 'none', managed-type: 'external')
Jan 06 13:43:26 managed-node3 NetworkManager[778]: <info>  [1736189006.1267] device (podman1): state change: ip-config -> ip-check (reason 'none', managed-type: 'external')
Jan 06 13:43:26 managed-node3 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service...
░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit NetworkManager-dispatcher.service has begun execution.
░░ 
░░ The job identifier is 2224.
Jan 06 13:43:26 managed-node3 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service.
░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit NetworkManager-dispatcher.service has finished successfully.
░░ 
░░ The job identifier is 2224.
Jan 06 13:43:26 managed-node3 NetworkManager[778]: <info>  [1736189006.1708] device (podman1): state change: ip-check -> secondaries (reason 'none', managed-type: 'external')
Jan 06 13:43:26 managed-node3 NetworkManager[778]: <info>  [1736189006.1712] device (podman1): state change: secondaries -> activated (reason 'none', managed-type: 'external')
Jan 06 13:43:26 managed-node3 NetworkManager[778]: <info>  [1736189006.1718] device (podman1): Activation: successful, device activated.
Jan 06 13:43:26 managed-node3 systemd[1]: Started run-rf36dd3ebf4744f0b803e4636ede4c94f.scope - /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run.
░░ Subject: A start job for unit run-rf36dd3ebf4744f0b803e4636ede4c94f.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit run-rf36dd3ebf4744f0b803e4636ede4c94f.scope has finished successfully.
░░ 
░░ The job identifier is 2303.
Jan 06 13:43:26 managed-node3 systemd[1]: Started a9d3cc1040b1e5f0254254904e0fe071ddf92268fb2ab1167defc8fa41d200cf-7d2355e9d8516c6e.timer - /usr/bin/podman healthcheck run a9d3cc1040b1e5f0254254904e0fe071ddf92268fb2ab1167defc8fa41d200cf.
░░ Subject: A start job for unit a9d3cc1040b1e5f0254254904e0fe071ddf92268fb2ab1167defc8fa41d200cf-7d2355e9d8516c6e.timer has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit a9d3cc1040b1e5f0254254904e0fe071ddf92268fb2ab1167defc8fa41d200cf-7d2355e9d8516c6e.timer has finished successfully.
░░ 
░░ The job identifier is 2309.
Jan 06 13:43:26 managed-node3 podman[31551]: 2025-01-06 13:43:26.289110639 -0500 EST m=+0.318531683 container init a9d3cc1040b1e5f0254254904e0fe071ddf92268fb2ab1167defc8fa41d200cf (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service)
Jan 06 13:43:26 managed-node3 systemd[1]: Started quadlet-demo-mysql.service.
░░ Subject: A start job for unit quadlet-demo-mysql.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit quadlet-demo-mysql.service has finished successfully.
░░ 
░░ The job identifier is 2137.
Jan 06 13:43:26 managed-node3 podman[31551]: 2025-01-06 13:43:26.315603272 -0500 EST m=+0.345024377 container start a9d3cc1040b1e5f0254254904e0fe071ddf92268fb2ab1167defc8fa41d200cf (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service)
Jan 06 13:43:26 managed-node3 quadlet-demo-mysql[31551]: a9d3cc1040b1e5f0254254904e0fe071ddf92268fb2ab1167defc8fa41d200cf
Jan 06 13:43:26 managed-node3 podman[31620]: 2025-01-06 13:43:26.489500264 -0500 EST m=+0.145361667 container health_status a9d3cc1040b1e5f0254254904e0fe071ddf92268fb2ab1167defc8fa41d200cf (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, health_status=healthy, health_failing_streak=0, health_log=, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service)
Jan 06 13:43:27 managed-node3 python3.12[31815]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:43:29 managed-node3 python3.12[31959]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:43:29 managed-node3 python3.12[32090]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/envoy-proxy-configmap.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jan 06 13:43:29 managed-node3 python3.12[32195]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736189009.173875-14061-170593450852424/.source.yml dest=/etc/containers/systemd/envoy-proxy-configmap.yml owner=root group=0 mode=0644 _original_basename=envoy-proxy-configmap.yml follow=False checksum=d681c7d56f912150d041873e880818b22a90c188 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:43:30 managed-node3 python3.12[32350]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Jan 06 13:43:30 managed-node3 systemd[1]: Reload requested from client PID 32351 ('systemctl') (unit session-5.scope)...
Jan 06 13:43:30 managed-node3 systemd[1]: Reloading...
Jan 06 13:43:30 managed-node3 systemd[1]: Reloading finished in 195 ms.
Jan 06 13:43:32 managed-node3 python3.12[32542]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:43:33 managed-node3 python3.12[32705]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:43:34 managed-node3 python3.12[32836]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jan 06 13:43:34 managed-node3 python3.12[32948]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/systemd/quadlet-demo.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1736189013.8411329-14251-232144668374786/.source.yml _original_basename=.4jxwsm9u follow=False checksum=998dccde0483b1654327a46ddd89cbaa47650370 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:43:35 managed-node3 python3.12[33079]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Jan 06 13:43:35 managed-node3 systemd[1]: Reload requested from client PID 33080 ('systemctl') (unit session-5.scope)...
Jan 06 13:43:35 managed-node3 systemd[1]: Reloading...
Jan 06 13:43:35 managed-node3 systemd[1]: Reloading finished in 193 ms.
Jan 06 13:43:36 managed-node3 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.
Jan 06 13:43:36 managed-node3 python3.12[33288]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 06 13:43:38 managed-node3 python3.12[33421]: ansible-slurp Invoked with path=/etc/containers/systemd/quadlet-demo.yml src=/etc/containers/systemd/quadlet-demo.yml
Jan 06 13:43:39 managed-node3 python3.12[33552]: ansible-file Invoked with path=/tmp/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:43:39 managed-node3 python3.12[33683]: ansible-file Invoked with path=/tmp/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:43:52 managed-node3 podman[33822]: 2025-01-06 13:43:52.41014808 -0500 EST m=+12.417104095 image pull fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b quay.io/linux-system-roles/wordpress:4.8-apache
Jan 06 13:43:56 managed-node3 podman[34305]: 2025-01-06 13:43:56.740153774 -0500 EST m=+0.168587649 container health_status a9d3cc1040b1e5f0254254904e0fe071ddf92268fb2ab1167defc8fa41d200cf (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, health_status=healthy, health_failing_streak=0, health_log=, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service)
Jan 06 13:43:57 managed-node3 podman[34239]: 2025-01-06 13:43:57.340367481 -0500 EST m=+4.479997306 image pull 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d quay.io/linux-system-roles/envoyproxy:v1.25.0
Jan 06 13:43:57 managed-node3 python3.12[34519]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:43:58 managed-node3 python3.12[34650]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.kube follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jan 06 13:43:58 managed-node3 python3.12[34755]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736189037.9919684-15082-277151472698982/.source.kube dest=/etc/containers/systemd/quadlet-demo.kube owner=root group=0 mode=0644 _original_basename=quadlet-demo.kube follow=False checksum=7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:43:59 managed-node3 python3.12[34886]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Jan 06 13:43:59 managed-node3 systemd[1]: Reload requested from client PID 34887 ('systemctl') (unit session-5.scope)...
Jan 06 13:43:59 managed-node3 systemd[1]: Reloading...
Jan 06 13:43:59 managed-node3 systemd[1]: Reloading finished in 198 ms.
Jan 06 13:44:00 managed-node3 python3.12[35070]: ansible-systemd Invoked with name=quadlet-demo.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None
Jan 06 13:44:00 managed-node3 systemd[1]: Starting quadlet-demo.service...
░░ Subject: A start job for unit quadlet-demo.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit quadlet-demo.service has begun execution.
░░ 
░░ The job identifier is 2543.
Jan 06 13:44:00 managed-node3 quadlet-demo[35074]: Pods stopped:
Jan 06 13:44:00 managed-node3 quadlet-demo[35074]: Pods removed:
Jan 06 13:44:00 managed-node3 quadlet-demo[35074]: Secrets removed:
Jan 06 13:44:00 managed-node3 quadlet-demo[35074]: Volumes removed:
Jan 06 13:44:00 managed-node3 podman[35074]: 2025-01-06 13:44:00.183187798 -0500 EST m=+0.030667094 volume create wp-pv-claim
Jan 06 13:44:00 managed-node3 systemd[4483]: Created slice background.slice - User Background Tasks Slice.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit UNIT has finished successfully.
░░ 
░░ The job identifier is 14.
Jan 06 13:44:00 managed-node3 systemd[4483]: Starting systemd-tmpfiles-clean.service - Cleanup of User's Temporary Files and Directories...
░░ Subject: A start job for unit UNIT has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit UNIT has begun execution.
░░ 
░░ The job identifier is 13.
Jan 06 13:44:00 managed-node3 systemd[4483]: Finished systemd-tmpfiles-clean.service - Cleanup of User's Temporary Files and Directories.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit UNIT has finished successfully.
░░ 
░░ The job identifier is 13.
Jan 06 13:44:00 managed-node3 systemd[1]: var-lib-containers-storage-overlay-d1633a14c740979c6a029b606a0be7a289c1381ccf1da919da3a1710c8f15f88-merged.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit var-lib-containers-storage-overlay-d1633a14c740979c6a029b606a0be7a289c1381ccf1da919da3a1710c8f15f88-merged.mount has successfully entered the 'dead' state.
Jan 06 13:44:00 managed-node3 podman[35074]: 2025-01-06 13:44:00.451465367 -0500 EST m=+0.298944771 image build  686eb9c9ce6c0454f7e2fa69c05872ef40a18e824a8fdf257567470dc5641f72
Jan 06 13:44:00 managed-node3 podman[35074]: 2025-01-06 13:44:00.474235542 -0500 EST m=+0.321714862 container create 5b2c81a505bbc06f1b6ec17b88af113083e1323bc20797004d0a74debddd0c1d (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Jan 06 13:44:00 managed-node3 podman[35074]: 2025-01-06 13:44:00.481812259 -0500 EST m=+0.329291581 volume create envoy-proxy-config
Jan 06 13:44:00 managed-node3 podman[35074]: 2025-01-06 13:44:00.488696306 -0500 EST m=+0.336175601 volume create envoy-certificates
Jan 06 13:44:00 managed-node3 systemd[1]: Created slice machine.slice - Slice /machine.
░░ Subject: A start job for unit machine.slice has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit machine.slice has finished successfully.
░░ 
░░ The job identifier is 2631.
Jan 06 13:44:00 managed-node3 systemd[1]: Created slice machine-libpod_pod_36d3302583a74b71eac9d710b5c30740b8afda79609fc49bb9b189198df62922.slice - cgroup machine-libpod_pod_36d3302583a74b71eac9d710b5c30740b8afda79609fc49bb9b189198df62922.slice.
░░ Subject: A start job for unit machine-libpod_pod_36d3302583a74b71eac9d710b5c30740b8afda79609fc49bb9b189198df62922.slice has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit machine-libpod_pod_36d3302583a74b71eac9d710b5c30740b8afda79609fc49bb9b189198df62922.slice has finished successfully.
░░ 
░░ The job identifier is 2630.
Jan 06 13:44:00 managed-node3 podman[35074]: 2025-01-06 13:44:00.533168387 -0500 EST m=+0.380647710 container create 48c70ece6166124f4fd98afbe1db3e3a691d21c15c3e48e2d015e5dc878345ce (image=localhost/podman-pause:5.3.1-1733097600, name=36d3302583a7-infra, pod_id=36d3302583a74b71eac9d710b5c30740b8afda79609fc49bb9b189198df62922, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Jan 06 13:44:00 managed-node3 podman[35074]: 2025-01-06 13:44:00.539146831 -0500 EST m=+0.386626128 pod create 36d3302583a74b71eac9d710b5c30740b8afda79609fc49bb9b189198df62922 (image=, name=quadlet-demo)
Jan 06 13:44:00 managed-node3 podman[35074]: 2025-01-06 13:44:00.548341949 -0500 EST m=+0.395821306 image pull fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b quay.io/linux-system-roles/wordpress:4.8-apache
Jan 06 13:44:00 managed-node3 podman[35074]: 2025-01-06 13:44:00.577926765 -0500 EST m=+0.425406105 container create 9c5cf0b2b75a06bbb7cf1c87ec4e9f07867fcead241537993578943f9ab2b027 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=36d3302583a74b71eac9d710b5c30740b8afda79609fc49bb9b189198df62922, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Jan 06 13:44:00 managed-node3 podman[35074]: 2025-01-06 13:44:00.604547291 -0500 EST m=+0.452026689 container create b8925aeec984a0a091a1663eda4eaebb690666dbbf56957cc924a9fca7a16ced (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=36d3302583a74b71eac9d710b5c30740b8afda79609fc49bb9b189198df62922, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Jan 06 13:44:00 managed-node3 podman[35074]: 2025-01-06 13:44:00.605246799 -0500 EST m=+0.452726355 container restart 5b2c81a505bbc06f1b6ec17b88af113083e1323bc20797004d0a74debddd0c1d (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Jan 06 13:44:00 managed-node3 systemd[1]: Started libpod-5b2c81a505bbc06f1b6ec17b88af113083e1323bc20797004d0a74debddd0c1d.scope - libcrun container.
░░ Subject: A start job for unit libpod-5b2c81a505bbc06f1b6ec17b88af113083e1323bc20797004d0a74debddd0c1d.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit libpod-5b2c81a505bbc06f1b6ec17b88af113083e1323bc20797004d0a74debddd0c1d.scope has finished successfully.
░░ 
░░ The job identifier is 2636.
Jan 06 13:44:00 managed-node3 podman[35074]: 2025-01-06 13:44:00.667025111 -0500 EST m=+0.514504463 container init 5b2c81a505bbc06f1b6ec17b88af113083e1323bc20797004d0a74debddd0c1d (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Jan 06 13:44:00 managed-node3 podman[35074]: 2025-01-06 13:44:00.669390073 -0500 EST m=+0.516869515 container start 5b2c81a505bbc06f1b6ec17b88af113083e1323bc20797004d0a74debddd0c1d (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Jan 06 13:44:00 managed-node3 podman[35074]: 2025-01-06 13:44:00.58139481 -0500 EST m=+0.428874275 image pull 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d quay.io/linux-system-roles/envoyproxy:v1.25.0
Jan 06 13:44:00 managed-node3 kernel: podman1: port 2(veth1) entered blocking state
Jan 06 13:44:00 managed-node3 kernel: podman1: port 2(veth1) entered disabled state
Jan 06 13:44:00 managed-node3 kernel: veth1: entered allmulticast mode
Jan 06 13:44:00 managed-node3 kernel: veth1: entered promiscuous mode
Jan 06 13:44:00 managed-node3 kernel: podman1: port 2(veth1) entered blocking state
Jan 06 13:44:00 managed-node3 kernel: podman1: port 2(veth1) entered forwarding state
Jan 06 13:44:00 managed-node3 NetworkManager[778]: <info>  [1736189040.6882] manager: (veth1): new Veth device (/org/freedesktop/NetworkManager/Devices/5)
Jan 06 13:44:00 managed-node3 NetworkManager[778]: <info>  [1736189040.6907] device (veth1): carrier: link connected
Jan 06 13:44:00 managed-node3 (udev-worker)[35140]: Network interface NamePolicy= disabled on kernel command line.
Jan 06 13:44:00 managed-node3 systemd[1]: Started libpod-48c70ece6166124f4fd98afbe1db3e3a691d21c15c3e48e2d015e5dc878345ce.scope - libcrun container.
░░ Subject: A start job for unit libpod-48c70ece6166124f4fd98afbe1db3e3a691d21c15c3e48e2d015e5dc878345ce.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit libpod-48c70ece6166124f4fd98afbe1db3e3a691d21c15c3e48e2d015e5dc878345ce.scope has finished successfully.
░░ 
░░ The job identifier is 2643.
Jan 06 13:44:00 managed-node3 podman[35074]: 2025-01-06 13:44:00.800481187 -0500 EST m=+0.647960613 container init 48c70ece6166124f4fd98afbe1db3e3a691d21c15c3e48e2d015e5dc878345ce (image=localhost/podman-pause:5.3.1-1733097600, name=36d3302583a7-infra, pod_id=36d3302583a74b71eac9d710b5c30740b8afda79609fc49bb9b189198df62922, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0)
Jan 06 13:44:00 managed-node3 podman[35074]: 2025-01-06 13:44:00.803517218 -0500 EST m=+0.650996631 container start 48c70ece6166124f4fd98afbe1db3e3a691d21c15c3e48e2d015e5dc878345ce (image=localhost/podman-pause:5.3.1-1733097600, name=36d3302583a7-infra, pod_id=36d3302583a74b71eac9d710b5c30740b8afda79609fc49bb9b189198df62922, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0)
Jan 06 13:44:00 managed-node3 systemd[1]: Started libpod-9c5cf0b2b75a06bbb7cf1c87ec4e9f07867fcead241537993578943f9ab2b027.scope - libcrun container.
░░ Subject: A start job for unit libpod-9c5cf0b2b75a06bbb7cf1c87ec4e9f07867fcead241537993578943f9ab2b027.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit libpod-9c5cf0b2b75a06bbb7cf1c87ec4e9f07867fcead241537993578943f9ab2b027.scope has finished successfully.
░░ 
░░ The job identifier is 2650.
Jan 06 13:44:00 managed-node3 podman[35074]: 2025-01-06 13:44:00.851592313 -0500 EST m=+0.699071673 container init 9c5cf0b2b75a06bbb7cf1c87ec4e9f07867fcead241537993578943f9ab2b027 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=36d3302583a74b71eac9d710b5c30740b8afda79609fc49bb9b189198df62922, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Jan 06 13:44:00 managed-node3 podman[35074]: 2025-01-06 13:44:00.853699661 -0500 EST m=+0.701179052 container start 9c5cf0b2b75a06bbb7cf1c87ec4e9f07867fcead241537993578943f9ab2b027 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=36d3302583a74b71eac9d710b5c30740b8afda79609fc49bb9b189198df62922, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Jan 06 13:44:00 managed-node3 systemd[1]: Started libpod-b8925aeec984a0a091a1663eda4eaebb690666dbbf56957cc924a9fca7a16ced.scope - libcrun container.
░░ Subject: A start job for unit libpod-b8925aeec984a0a091a1663eda4eaebb690666dbbf56957cc924a9fca7a16ced.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit libpod-b8925aeec984a0a091a1663eda4eaebb690666dbbf56957cc924a9fca7a16ced.scope has finished successfully.
░░ 
░░ The job identifier is 2657.
Jan 06 13:44:00 managed-node3 podman[35074]: 2025-01-06 13:44:00.927343403 -0500 EST m=+0.774822788 container init b8925aeec984a0a091a1663eda4eaebb690666dbbf56957cc924a9fca7a16ced (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=36d3302583a74b71eac9d710b5c30740b8afda79609fc49bb9b189198df62922, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Jan 06 13:44:00 managed-node3 podman[35074]: 2025-01-06 13:44:00.929636085 -0500 EST m=+0.777115497 container start b8925aeec984a0a091a1663eda4eaebb690666dbbf56957cc924a9fca7a16ced (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=36d3302583a74b71eac9d710b5c30740b8afda79609fc49bb9b189198df62922, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Jan 06 13:44:00 managed-node3 podman[35074]: 2025-01-06 13:44:00.934233077 -0500 EST m=+0.781712461 pod start 36d3302583a74b71eac9d710b5c30740b8afda79609fc49bb9b189198df62922 (image=, name=quadlet-demo)
Jan 06 13:44:00 managed-node3 systemd[1]: Started quadlet-demo.service.
░░ Subject: A start job for unit quadlet-demo.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit quadlet-demo.service has finished successfully.
░░ 
░░ The job identifier is 2543.
Jan 06 13:44:00 managed-node3 quadlet-demo[35074]: Volumes:
Jan 06 13:44:00 managed-node3 quadlet-demo[35074]: wp-pv-claim
Jan 06 13:44:00 managed-node3 quadlet-demo[35074]: Pod:
Jan 06 13:44:00 managed-node3 quadlet-demo[35074]: 36d3302583a74b71eac9d710b5c30740b8afda79609fc49bb9b189198df62922
Jan 06 13:44:00 managed-node3 quadlet-demo[35074]: Containers:
Jan 06 13:44:00 managed-node3 quadlet-demo[35074]: 9c5cf0b2b75a06bbb7cf1c87ec4e9f07867fcead241537993578943f9ab2b027
Jan 06 13:44:00 managed-node3 quadlet-demo[35074]: b8925aeec984a0a091a1663eda4eaebb690666dbbf56957cc924a9fca7a16ced
Jan 06 13:44:01 managed-node3 python3.12[35399]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /etc/containers/systemd _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 06 13:44:02 managed-node3 python3.12[35614]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps -a _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 06 13:44:02 managed-node3 python3.12[35779]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 06 13:44:03 managed-node3 python3.12[35918]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod ps --ctr-ids --ctr-names --ctr-status _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 06 13:44:03 managed-node3 python3.12[36056]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail; systemctl list-units | grep quadlet _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 06 13:44:04 managed-node3 python3.12[36190]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:44:09 managed-node3 python3.12[36321]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:44:15 managed-node3 python3.12[36452]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:44:20 managed-node3 python3.12[36583]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:44:25 managed-node3 python3.12[36714]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:44:27 managed-node3 podman[36736]: 2025-01-06 13:44:27.976092234 -0500 EST m=+0.099114458 container health_status a9d3cc1040b1e5f0254254904e0fe071ddf92268fb2ab1167defc8fa41d200cf (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, health_status=healthy, health_failing_streak=0, health_log=, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service)
Jan 06 13:44:31 managed-node3 python3.12[36861]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:44:36 managed-node3 python3.12[36992]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 06 13:44:37 managed-node3 python3.12[37123]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None

TASK [Check] *******************************************************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:148
Monday 06 January 2025  13:44:37 -0500 (0:00:00.438)       0:02:07.960 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "podman",
        "ps",
        "-a"
    ],
    "delta": "0:00:00.034375",
    "end": "2025-01-06 13:44:37.592155",
    "rc": 0,
    "start": "2025-01-06 13:44:37.557780"
}

STDOUT:

CONTAINER ID  IMAGE                                            COMMAND               CREATED             STATUS                       PORTS                                                      NAMES
a9d3cc1040b1  quay.io/linux-system-roles/mysql:5.6             mysqld                About a minute ago  Up About a minute (healthy)  3306/tcp                                                   quadlet-demo-mysql
5b2c81a505bb  localhost/podman-pause:5.3.1-1733097600                                37 seconds ago      Up 37 seconds                                                                           a96f3a51b8d1-service
48c70ece6166  localhost/podman-pause:5.3.1-1733097600                                37 seconds ago      Up 37 seconds                0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp             36d3302583a7-infra
9c5cf0b2b75a  quay.io/linux-system-roles/wordpress:4.8-apache  apache2-foregroun...  37 seconds ago      Up 37 seconds                0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp, 80/tcp     quadlet-demo-wordpress
b8925aeec984  quay.io/linux-system-roles/envoyproxy:v1.25.0    envoy -c /etc/env...  37 seconds ago      Up 37 seconds                0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp, 10000/tcp  quadlet-demo-envoy

TASK [Check pods] **************************************************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:152
Monday 06 January 2025  13:44:37 -0500 (0:00:00.417)       0:02:08.378 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "podman",
        "pod",
        "ps",
        "--ctr-ids",
        "--ctr-names",
        "--ctr-status"
    ],
    "delta": "0:00:00.034257",
    "end": "2025-01-06 13:44:38.039530",
    "failed_when_result": false,
    "rc": 0,
    "start": "2025-01-06 13:44:38.005273"
}

STDOUT:

POD ID        NAME          STATUS      CREATED         INFRA ID      IDS                                     NAMES                                                         STATUS
36d3302583a7  quadlet-demo  Running     37 seconds ago  48c70ece6166  48c70ece6166,9c5cf0b2b75a,b8925aeec984  36d3302583a7-infra,quadlet-demo-wordpress,quadlet-demo-envoy  running,running,running
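
The "Check pods" task adds --ctr-ids/--ctr-names/--ctr-status so the pod listing also shows the state of every container inside the pod. A sketch under the same assumptions (the register name is hypothetical; failed_when: false matches the failed_when_result field shown above):

- name: Check pods
  ansible.builtin.command: podman pod ps --ctr-ids --ctr-names --ctr-status
  register: __check_pods   # hypothetical variable name
  failed_when: false       # never fail the play on this diagnostic listing
  changed_when: false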

TASK [Check systemd] ***********************************************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:157
Monday 06 January 2025  13:44:38 -0500 (0:00:00.441)       0:02:08.819 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": "set -euo pipefail; systemctl list-units --all | grep quadlet",
    "delta": "0:00:00.014395",
    "end": "2025-01-06 13:44:38.454708",
    "failed_when_result": false,
    "rc": 0,
    "start": "2025-01-06 13:44:38.440313"
}

STDOUT:

  quadlet-demo-mysql-volume.service                                                                                                    loaded    active   exited    quadlet-demo-mysql-volume.service
  quadlet-demo-mysql.service                                                                                                           loaded    active   running   quadlet-demo-mysql.service
  quadlet-demo-network.service                                                                                                         loaded    active   exited    quadlet-demo-network.service
  quadlet-demo.service                                                                                                                 loaded    active   running   quadlet-demo.service
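
The "Check systemd" task pipes systemctl through grep and therefore goes through the shell module; `set -euo pipefail` keeps a failure of systemctl itself from being masked by the pipeline. A sketch (register name and changed_when are assumptions; the command string is copied from the result above):

- name: Check systemd
  ansible.builtin.shell: set -euo pipefail; systemctl list-units --all | grep quadlet
  register: __check_systemd   # hypothetical variable name
  failed_when: false
  changed_when: false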

TASK [LS] **********************************************************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:165
Monday 06 January 2025  13:44:38 -0500 (0:00:00.413)       0:02:09.233 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "ls",
        "-alrtF",
        "/etc/systemd/system"
    ],
    "delta": "0:00:00.004124",
    "end": "2025-01-06 13:44:38.865583",
    "failed_when_result": false,
    "rc": 0,
    "start": "2025-01-06 13:44:38.861459"
}

STDOUT:

total 12
drwxr-xr-x.  5 root root   47 Dec 20 02:21 ../
lrwxrwxrwx.  1 root root   43 Dec 20 02:21 dbus.service -> /usr/lib/systemd/system/dbus-broker.service
drwxr-xr-x.  2 root root   32 Dec 20 02:21 getty.target.wants/
lrwxrwxrwx.  1 root root   37 Dec 20 02:21 ctrl-alt-del.target -> /usr/lib/systemd/system/reboot.target
drwxr-xr-x.  2 root root   48 Dec 20 02:22 network-online.target.wants/
lrwxrwxrwx.  1 root root   57 Dec 20 02:22 dbus-org.freedesktop.nm-dispatcher.service -> /usr/lib/systemd/system/NetworkManager-dispatcher.service
drwxr-xr-x.  2 root root   76 Dec 20 02:22 timers.target.wants/
drwxr-xr-x.  2 root root   38 Dec 20 02:22 dev-virtio\x2dports-org.qemu.guest_agent.0.device.wants/
lrwxrwxrwx.  1 root root   41 Dec 20 02:25 default.target -> /usr/lib/systemd/system/multi-user.target
drwxr-xr-x.  2 root root   31 Dec 20 02:37 remote-fs.target.wants/
drwxr-xr-x.  2 root root  119 Dec 20 02:38 cloud-init.target.wants/
drwxr-xr-x.  2 root root 4096 Dec 20 02:38 sysinit.target.wants/
drwxr-xr-x.  2 root root  113 Jan  6 13:40 sockets.target.wants/
drwxr-xr-x.  2 root root 4096 Jan  6 13:42 multi-user.target.wants/
lrwxrwxrwx.  1 root root   41 Jan  6 13:42 dbus-org.fedoraproject.FirewallD1.service -> /usr/lib/systemd/system/firewalld.service
drwxr-xr-x. 11 root root 4096 Jan  6 13:42 ./

TASK [Cleanup] *****************************************************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:172
Monday 06 January 2025  13:44:38 -0500 (0:00:00.447)       0:02:09.681 ******** 
included: fedora.linux_system_roles.podman for managed-node3

TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3
Monday 06 January 2025  13:44:39 -0500 (0:00:00.087)       0:02:09.768 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] ****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3
Monday 06 January 2025  13:44:39 -0500 (0:00:00.051)       0:02:09.820 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11
Monday 06 January 2025  13:44:39 -0500 (0:00:00.037)       0:02:09.857 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16
Monday 06 January 2025  13:44:39 -0500 (0:00:00.031)       0:02:09.889 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23
Monday 06 January 2025  13:44:39 -0500 (0:00:00.031)       0:02:09.921 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_is_transactional is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28
Monday 06 January 2025  13:44:39 -0500 (0:00:00.030)       0:02:09.952 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_is_transactional is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32
Monday 06 January 2025  13:44:39 -0500 (0:00:00.032)       0:02:09.984 ******** 
ok: [managed-node3] => (item=RedHat.yml) => {
    "ansible_facts": {
        "__podman_packages": [
            "podman",
            "shadow-utils-subid"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml"
}
skipping: [managed-node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "__podman_packages": [
            "iptables-nft",
            "podman",
            "shadow-utils-subid"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}
ok: [managed-node3] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "__podman_packages": [
            "iptables-nft",
            "podman",
            "shadow-utils-subid"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}
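
The two CentOS_10.yml entries above are not a duplicated log block: the role tries one vars file per candidate name (os family, distribution, distribution plus major version, distribution plus full version), and on CentOS Stream 10 the last two candidates resolve to the same file. Roughly, the include pattern looks like this (the exact candidate list and variable names are assumptions inferred from the loop items and the `__vars_file is file` condition shown above):

- name: Set platform/version specific variables
  ansible.builtin.include_vars: "{{ __vars_file }}"
  vars:
    __vars_file: "{{ role_path }}/vars/{{ item }}"
  loop:
    - "{{ ansible_facts['os_family'] }}.yml"
    - "{{ ansible_facts['distribution'] }}.yml"
    - "{{ ansible_facts['distribution'] }}_{{ ansible_facts['distribution_major_version'] }}.yml"
    - "{{ ansible_facts['distribution'] }}_{{ ansible_facts['distribution_version'] }}.yml"
  when: __vars_file is file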

TASK [fedora.linux_system_roles.podman : Gather the package facts] *************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
Monday 06 January 2025  13:44:39 -0500 (0:00:00.068)       0:02:10.052 ******** 
ok: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Enable copr if requested] *************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10
Monday 06 January 2025  13:44:40 -0500 (0:00:00.764)       0:02:10.817 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "podman_use_copr | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14
Monday 06 January 2025  13:44:40 -0500 (0:00:00.031)       0:02:10.849 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "(__podman_packages | difference(ansible_facts.packages))",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28
Monday 06 January 2025  13:44:40 -0500 (0:00:00.033)       0:02:10.882 ******** 
skipping: [managed-node3] => {
    "false_condition": "__podman_is_transactional | d(false)"
}

TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33
Monday 06 January 2025  13:44:40 -0500 (0:00:00.032)       0:02:10.915 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_is_transactional | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38
Monday 06 January 2025  13:44:40 -0500 (0:00:00.031)       0:02:10.946 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_is_transactional | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get podman version] *******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46
Monday 06 January 2025  13:44:40 -0500 (0:00:00.032)       0:02:10.978 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "podman",
        "--version"
    ],
    "delta": "0:00:00.023457",
    "end": "2025-01-06 13:44:40.618523",
    "rc": 0,
    "start": "2025-01-06 13:44:40.595066"
}

STDOUT:

podman version 5.3.1

TASK [fedora.linux_system_roles.podman : Set podman version] *******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52
Monday 06 January 2025  13:44:40 -0500 (0:00:00.420)       0:02:11.399 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "podman_version": "5.3.1"
    },
    "changed": false
}
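
The role derives the `podman_version` fact from the `podman --version` output just captured. The parsing below is a sketch (the register name and the split expression are assumptions; the log only shows the command and the resulting fact):

- name: Get podman version
  ansible.builtin.command: podman --version
  register: __podman_version_output   # hypothetical variable name
  changed_when: false

- name: Set podman version
  ansible.builtin.set_fact:
    podman_version: "{{ __podman_version_output.stdout.split()[-1] }}"   # "podman version 5.3.1" -> "5.3.1"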

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56
Monday 06 January 2025  13:44:40 -0500 (0:00:00.120)       0:02:11.519 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "podman_version is version(\"4.2\", \"<\")",
    "skip_reason": "Conditional result was False"
}
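
The version gates compare the fact set above using Ansible's `version` test; with podman 5.3.1 every gate is skipped. A sketch of one gate (the failure message wording is an assumption; the condition is copied from the false_condition field above):

- name: Podman package version must be 4.2 or later
  ansible.builtin.fail:
    msg: podman version {{ podman_version }} is too old, 4.2 or later is required   # wording assumed
  when: podman_version is version("4.2", "<")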

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63
Monday 06 January 2025  13:44:40 -0500 (0:00:00.045)       0:02:11.565 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "podman_version is version(\"4.4\", \"<\")",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73
Monday 06 January 2025  13:44:40 -0500 (0:00:00.057)       0:02:11.623 ******** 
META: end_host conditional evaluated to False, continuing execution for managed-node3
skipping: [managed-node3] => {
    "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3"
}

MSG:

end_host conditional evaluated to false, continuing execution for managed-node3

TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80
Monday 06 January 2025  13:44:40 -0500 (0:00:00.041)       0:02:11.664 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96
Monday 06 January 2025  13:44:41 -0500 (0:00:00.056)       0:02:11.721 ******** 
META: end_host conditional evaluated to False, continuing execution for managed-node3
skipping: [managed-node3] => {
    "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3"
}

MSG:

end_host conditional evaluated to false, continuing execution for managed-node3

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109
Monday 06 January 2025  13:44:41 -0500 (0:00:00.056)       0:02:11.777 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 06 January 2025  13:44:41 -0500 (0:00:00.094)       0:02:11.872 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 06 January 2025  13:44:41 -0500 (0:00:00.036)       0:02:11.908 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Debug] ********************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 06 January 2025  13:44:41 -0500 (0:00:00.036)       0:02:11.945 ******** 
ok: [managed-node3] => {}

MSG:

item {}

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:20
Monday 06 January 2025  13:44:41 -0500 (0:00:00.034)       0:02:11.980 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_group": "0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35
Monday 06 January 2025  13:44:41 -0500 (0:00:00.043)       0:02:12.023 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1736188868.5129364,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 32,
        "charset": "binary",
        "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97",
        "ctime": 1736188848.6497798,
        "dev": 51714,
        "device_type": 0,
        "executable": true,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 9125782,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "application/x-pie-executable",
        "mode": "0755",
        "mtime": 1730678400.0,
        "nlink": 1,
        "path": "/usr/bin/getsubids",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 15744,
        "uid": 0,
        "version": "4070602005",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": true,
        "xoth": true,
        "xusr": true
    }
}
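
Because /usr/bin/getsubids exists, the role would use it to look up subuid/subgid ranges for rootless users; here the user is root, so the lookup tasks that follow are skipped. A sketch of that logic (the getsubids invocation is an assumption; the stat path, register name, and the root/0 condition appear in the log):

- name: See if getsubids exists
  ansible.builtin.stat:
    path: /usr/bin/getsubids
  register: __podman_stat_getsubids

- name: Check with getsubids for user subuids
  ansible.builtin.command: getsubids {{ __podman_user }}   # invocation assumed
  changed_when: false
  when: __podman_user not in ["root", "0"]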

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:46
Monday 06 January 2025  13:44:41 -0500 (0:00:00.414)       0:02:12.438 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:51
Monday 06 January 2025  13:44:41 -0500 (0:00:00.057)       0:02:12.496 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:56
Monday 06 January 2025  13:44:41 -0500 (0:00:00.051)       0:02:12.547 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:69
Monday 06 January 2025  13:44:41 -0500 (0:00:00.054)       0:02:12.601 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74
Monday 06 January 2025  13:44:41 -0500 (0:00:00.054)       0:02:12.655 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79
Monday 06 January 2025  13:44:42 -0500 (0:00:00.051)       0:02:12.707 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Monday 06 January 2025  13:44:42 -0500 (0:00:00.056)       0:02:12.764 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:96
Monday 06 January 2025  13:44:42 -0500 (0:00:00.155)       0:02:12.920 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set config file paths] ****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115
Monday 06 January 2025  13:44:42 -0500 (0:00:00.088)       0:02:13.008 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf",
        "__podman_policy_json_file": "/etc/containers/policy.json",
        "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf",
        "__podman_storage_conf_file": "/etc/containers/storage.conf"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle container.conf.d] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124
Monday 06 January 2025  13:44:42 -0500 (0:00:00.068)       0:02:13.077 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] ***********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5
Monday 06 January 2025  13:44:42 -0500 (0:00:00.097)       0:02:13.174 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "podman_containers_conf | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Update container config file] *********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13
Monday 06 January 2025  13:44:42 -0500 (0:00:00.052)       0:02:13.227 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "podman_containers_conf | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] *************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127
Monday 06 January 2025  13:44:42 -0500 (0:00:00.052)       0:02:13.279 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] ***********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5
Monday 06 January 2025  13:44:42 -0500 (0:00:00.115)       0:02:13.395 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "podman_registries_conf | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Update registries config file] ********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13
Monday 06 January 2025  13:44:42 -0500 (0:00:00.065)       0:02:13.460 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "podman_registries_conf | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Handle storage.conf] ******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130
Monday 06 January 2025  13:44:42 -0500 (0:00:00.066)       0:02:13.526 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5
Monday 06 January 2025  13:44:42 -0500 (0:00:00.107)       0:02:13.634 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "podman_storage_conf | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Update storage config file] ***********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13
Monday 06 January 2025  13:44:42 -0500 (0:00:00.050)       0:02:13.684 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "podman_storage_conf | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Handle policy.json] *******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133
Monday 06 January 2025  13:44:43 -0500 (0:00:00.052)       0:02:13.736 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6
Monday 06 January 2025  13:44:43 -0500 (0:00:00.114)       0:02:13.851 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "podman_policy_json | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14
Monday 06 January 2025  13:44:43 -0500 (0:00:00.055)       0:02:13.907 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "podman_policy_json | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get the existing policy.json] *********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19
Monday 06 January 2025  13:44:43 -0500 (0:00:00.064)       0:02:13.972 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "podman_policy_json | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Write new policy.json file] ***********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25
Monday 06 January 2025  13:44:43 -0500 (0:00:00.089)       0:02:14.062 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "podman_policy_json | length > 0",
    "skip_reason": "Conditional result was False"
}
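
The containers.conf.d, registries.conf.d, storage.conf, and policy.json handlers all follow the same guard: they act only when the corresponding role variable is non-empty, which is why each of them is skipped in this run. A sketch of the guard pattern (module parameters are assumptions; the condition is copied from the false_condition fields above):

- name: Ensure policy.json parent dir exists
  ansible.builtin.file:
    path: "{{ __podman_policy_json_file | dirname }}"
    state: directory
  when: podman_policy_json | length > 0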

TASK [Manage firewall for specified ports] *************************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139
Monday 06 January 2025  13:44:43 -0500 (0:00:00.074)       0:02:14.136 ******** 
included: fedora.linux_system_roles.firewall for managed-node3

TASK [fedora.linux_system_roles.firewall : Setup firewalld] ********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2
Monday 06 January 2025  13:44:43 -0500 (0:00:00.335)       0:02:14.472 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node3

TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2
Monday 06 January 2025  13:44:43 -0500 (0:00:00.110)       0:02:14.582 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__firewall_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Check if system is ostree] **********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10
Monday 06 January 2025  13:44:43 -0500 (0:00:00.066)       0:02:14.648 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __firewall_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15
Monday 06 January 2025  13:44:44 -0500 (0:00:00.055)       0:02:14.703 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __firewall_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22
Monday 06 January 2025  13:44:44 -0500 (0:00:00.055)       0:02:14.759 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __firewall_is_transactional is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:27
Monday 06 January 2025  13:44:44 -0500 (0:00:00.050)       0:02:14.809 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __firewall_is_transactional is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Install firewalld] ******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31
Monday 06 January 2025  13:44:44 -0500 (0:00:00.055)       0:02:14.865 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
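
"Nothing to do" with rc 0 and an empty results list is the package manager reporting that firewalld is already installed. A sketch of the install task (the package list and module choice are assumptions):

- name: Install firewalld
  ansible.builtin.package:
    name: firewalld
    state: present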

TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:43
Monday 06 January 2025  13:44:44 -0500 (0:00:00.747)       0:02:15.612 ******** 
skipping: [managed-node3] => {
    "false_condition": "__firewall_is_transactional | d(false)"
}

TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:48
Monday 06 January 2025  13:44:44 -0500 (0:00:00.039)       0:02:15.652 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__firewall_is_transactional | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:53
Monday 06 January 2025  13:44:44 -0500 (0:00:00.036)       0:02:15.688 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__firewall_is_transactional | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Collect service facts] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5
Monday 06 January 2025  13:44:45 -0500 (0:00:00.035)       0:02:15.724 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "firewall_disable_conflicting_services | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:9
Monday 06 January 2025  13:44:45 -0500 (0:00:00.048)       0:02:15.773 ******** 
skipping: [managed-node3] => (item=nftables)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "firewall_disable_conflicting_services | bool",
    "item": "nftables",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=iptables)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "firewall_disable_conflicting_services | bool",
    "item": "iptables",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=ufw)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "firewall_disable_conflicting_services | bool",
    "item": "ufw",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped
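
With firewall_disable_conflicting_services evaluating to false here, the loop over nftables, iptables, and ufw is skipped entirely. A sketch of that task (the service module parameters are assumptions; the loop items and condition are taken from the skip output above):

- name: Attempt to stop and disable conflicting services
  ansible.builtin.service:
    name: "{{ item }}"
    state: stopped
    enabled: false
  loop:
    - nftables
    - iptables
    - ufw
  when: firewall_disable_conflicting_services | bool
  failed_when: false   # "attempt": assumed to tolerate services that are not installed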

TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] ***********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:22
Monday 06 January 2025  13:44:45 -0500 (0:00:00.055)       0:02:15.828 ******** 
ok: [managed-node3] => {
    "changed": false,
    "name": "firewalld",
    "status": {
        "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0",
        "ActiveEnterTimestamp": "Mon 2025-01-06 13:42:54 EST",
        "ActiveEnterTimestampMonotonic": "415937494",
        "ActiveExitTimestampMonotonic": "0",
        "ActiveState": "active",
        "After": "polkit.service basic.target dbus.socket system.slice sysinit.target dbus-broker.service",
        "AllowIsolate": "no",
        "AssertResult": "yes",
        "AssertTimestamp": "Mon 2025-01-06 13:42:54 EST",
        "AssertTimestampMonotonic": "415683410",
        "Before": "network-pre.target multi-user.target shutdown.target",
        "BlockIOAccounting": "no",
        "BlockIOWeight": "[not set]",
        "BusName": "org.fedoraproject.FirewallD1",
        "CPUAccounting": "yes",
        "CPUAffinityFromNUMA": "no",
        "CPUQuotaPerSecUSec": "infinity",
        "CPUQuotaPeriodUSec": "infinity",
        "CPUSchedulingPolicy": "0",
        "CPUSchedulingPriority": "0",
        "CPUSchedulingResetOnFork": "no",
        "CPUShares": "[not set]",
        "CPUUsageNSec": "410004000",
        "CPUWeight": "[not set]",
        "CacheDirectoryMode": "0755",
        "CanFreeze": "yes",
        "CanIsolate": "no",
        "CanReload": "yes",
        "CanStart": "yes",
        "CanStop": "yes",
        "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
        "CleanResult": "success",
        "CollectMode": "inactive",
        "ConditionResult": "yes",
        "ConditionTimestamp": "Mon 2025-01-06 13:42:54 EST",
        "ConditionTimestampMonotonic": "415683407",
        "ConfigurationDirectoryMode": "0755",
        "Conflicts": "ip6tables.service shutdown.target ebtables.service ipset.service iptables.service",
        "ControlGroup": "/system.slice/firewalld.service",
        "ControlGroupId": "4930",
        "ControlPID": "0",
        "CoredumpFilter": "0x33",
        "CoredumpReceive": "no",
        "DefaultDependencies": "yes",
        "DefaultMemoryLow": "0",
        "DefaultMemoryMin": "0",
        "DefaultStartupMemoryLow": "0",
        "Delegate": "no",
        "Description": "firewalld - dynamic firewall daemon",
        "DeviceAllow": "char-rtc r",
        "DevicePolicy": "closed",
        "Documentation": "\"man:firewalld(1)\"",
        "DynamicUser": "no",
        "EffectiveCPUs": "0-1",
        "EffectiveMemoryHigh": "3698229248",
        "EffectiveMemoryMax": "3698229248",
        "EffectiveMemoryNodes": "0",
        "EffectiveTasksMax": "22349",
        "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)",
        "ExecMainCode": "0",
        "ExecMainExitTimestampMonotonic": "0",
        "ExecMainHandoffTimestamp": "Mon 2025-01-06 13:42:54 EST",
        "ExecMainHandoffTimestampMonotonic": "415715983",
        "ExecMainPID": "27832",
        "ExecMainStartTimestamp": "Mon 2025-01-06 13:42:54 EST",
        "ExecMainStartTimestampMonotonic": "415686149",
        "ExecMainStatus": "0",
        "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExitType": "main",
        "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "FailureAction": "none",
        "FileDescriptorStoreMax": "0",
        "FileDescriptorStorePreserve": "restart",
        "FinalKillSignal": "9",
        "FragmentPath": "/usr/lib/systemd/system/firewalld.service",
        "FreezerState": "running",
        "GID": "[not set]",
        "GuessMainPID": "yes",
        "IOAccounting": "no",
        "IOReadBytes": "[not set]",
        "IOReadOperations": "[not set]",
        "IOSchedulingClass": "2",
        "IOSchedulingPriority": "4",
        "IOWeight": "[not set]",
        "IOWriteBytes": "[not set]",
        "IOWriteOperations": "[not set]",
        "IPAccounting": "no",
        "IPEgressBytes": "[no data]",
        "IPEgressPackets": "[no data]",
        "IPIngressBytes": "[no data]",
        "IPIngressPackets": "[no data]",
        "Id": "firewalld.service",
        "IgnoreOnIsolate": "no",
        "IgnoreSIGPIPE": "yes",
        "InactiveEnterTimestampMonotonic": "0",
        "InactiveExitTimestamp": "Mon 2025-01-06 13:42:54 EST",
        "InactiveExitTimestampMonotonic": "415686528",
        "InvocationID": "832872877ec346978863b0caa21bae35",
        "JobRunningTimeoutUSec": "infinity",
        "JobTimeoutAction": "none",
        "JobTimeoutUSec": "infinity",
        "KeyringMode": "private",
        "KillMode": "mixed",
        "KillSignal": "15",
        "LimitAS": "infinity",
        "LimitASSoft": "infinity",
        "LimitCORE": "infinity",
        "LimitCORESoft": "infinity",
        "LimitCPU": "infinity",
        "LimitCPUSoft": "infinity",
        "LimitDATA": "infinity",
        "LimitDATASoft": "infinity",
        "LimitFSIZE": "infinity",
        "LimitFSIZESoft": "infinity",
        "LimitLOCKS": "infinity",
        "LimitLOCKSSoft": "infinity",
        "LimitMEMLOCK": "8388608",
        "LimitMEMLOCKSoft": "8388608",
        "LimitMSGQUEUE": "819200",
        "LimitMSGQUEUESoft": "819200",
        "LimitNICE": "0",
        "LimitNICESoft": "0",
        "LimitNOFILE": "524288",
        "LimitNOFILESoft": "1024",
        "LimitNPROC": "13968",
        "LimitNPROCSoft": "13968",
        "LimitRSS": "infinity",
        "LimitRSSSoft": "infinity",
        "LimitRTPRIO": "0",
        "LimitRTPRIOSoft": "0",
        "LimitRTTIME": "infinity",
        "LimitRTTIMESoft": "infinity",
        "LimitSIGPENDING": "13968",
        "LimitSIGPENDINGSoft": "13968",
        "LimitSTACK": "infinity",
        "LimitSTACKSoft": "8388608",
        "LoadState": "loaded",
        "LockPersonality": "yes",
        "LogLevelMax": "-1",
        "LogRateLimitBurst": "0",
        "LogRateLimitIntervalUSec": "0",
        "LogsDirectoryMode": "0755",
        "MainPID": "27832",
        "ManagedOOMMemoryPressure": "auto",
        "ManagedOOMMemoryPressureLimit": "0",
        "ManagedOOMPreference": "none",
        "ManagedOOMSwap": "auto",
        "MemoryAccounting": "yes",
        "MemoryAvailable": "2594910208",
        "MemoryCurrent": "33964032",
        "MemoryDenyWriteExecute": "yes",
        "MemoryHigh": "infinity",
        "MemoryKSM": "no",
        "MemoryLimit": "infinity",
        "MemoryLow": "0",
        "MemoryMax": "infinity",
        "MemoryMin": "0",
        "MemoryPeak": "34234368",
        "MemoryPressureThresholdUSec": "200ms",
        "MemoryPressureWatch": "auto",
        "MemorySwapCurrent": "0",
        "MemorySwapMax": "infinity",
        "MemorySwapPeak": "0",
        "MemoryZSwapCurrent": "0",
        "MemoryZSwapMax": "infinity",
        "MemoryZSwapWriteback": "yes",
        "MountAPIVFS": "no",
        "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "NFileDescriptorStore": "0",
        "NRestarts": "0",
        "NUMAPolicy": "n/a",
        "Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service",
        "NeedDaemonReload": "no",
        "Nice": "0",
        "NoNewPrivileges": "no",
        "NonBlocking": "no",
        "NotifyAccess": "none",
        "OOMPolicy": "stop",
        "OOMScoreAdjust": "0",
        "OnFailureJobMode": "replace",
        "OnSuccessJobMode": "fail",
        "Perpetual": "no",
        "PrivateDevices": "yes",
        "PrivateIPC": "no",
        "PrivateMounts": "no",
        "PrivateNetwork": "no",
        "PrivateTmp": "no",
        "PrivateUsers": "no",
        "ProcSubset": "all",
        "ProtectClock": "yes",
        "ProtectControlGroups": "yes",
        "ProtectHome": "yes",
        "ProtectHostname": "yes",
        "ProtectKernelLogs": "yes",
        "ProtectKernelModules": "no",
        "ProtectKernelTunables": "no",
        "ProtectProc": "default",
        "ProtectSystem": "yes",
        "RefuseManualStart": "no",
        "RefuseManualStop": "no",
        "ReloadResult": "success",
        "ReloadSignal": "1",
        "RemainAfterExit": "no",
        "RemoveIPC": "no",
        "Requires": "dbus.socket system.slice sysinit.target",
        "Restart": "no",
        "RestartKillSignal": "15",
        "RestartMaxDelayUSec": "infinity",
        "RestartMode": "normal",
        "RestartSteps": "0",
        "RestartUSec": "100ms",
        "RestartUSecNext": "100ms",
        "RestrictNamespaces": "no",
        "RestrictRealtime": "yes",
        "RestrictSUIDSGID": "yes",
        "Result": "success",
        "RootDirectoryStartOnly": "no",
        "RootEphemeral": "no",
        "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "RuntimeDirectoryMode": "0755",
        "RuntimeDirectoryPreserve": "no",
        "RuntimeMaxUSec": "infinity",
        "RuntimeRandomizedExtraUSec": "0",
        "SameProcessGroup": "no",
        "SecureBits": "0",
        "SendSIGHUP": "no",
        "SendSIGKILL": "yes",
        "SetLoginEnvironment": "no",
        "Slice": "system.slice",
        "StandardError": "null",
        "StandardInput": "null",
        "StandardOutput": "null",
        "StartLimitAction": "none",
        "StartLimitBurst": "5",
        "StartLimitIntervalUSec": "10s",
        "StartupBlockIOWeight": "[not set]",
        "StartupCPUShares": "[not set]",
        "StartupCPUWeight": "[not set]",
        "StartupIOWeight": "[not set]",
        "StartupMemoryHigh": "infinity",
        "StartupMemoryLow": "0",
        "StartupMemoryMax": "infinity",
        "StartupMemorySwapMax": "infinity",
        "StartupMemoryZSwapMax": "infinity",
        "StateChangeTimestamp": "Mon 2025-01-06 13:43:59 EST",
        "StateChangeTimestampMonotonic": "480606547",
        "StateDirectoryMode": "0755",
        "StatusErrno": "0",
        "StopWhenUnneeded": "no",
        "SubState": "running",
        "SuccessAction": "none",
        "SurviveFinalKillSignal": "no",
        "SyslogFacility": "3",
        "SyslogLevel": "6",
        "SyslogLevelPrefix": "yes",
        "SyslogPriority": "30",
        "SystemCallArchitectures": "native",
        "SystemCallErrorNumber": "2147483646",
        "TTYReset": "no",
        "TTYVHangup": "no",
        "TTYVTDisallocate": "no",
        "TasksAccounting": "yes",
        "TasksCurrent": "2",
        "TasksMax": "22349",
        "TimeoutAbortUSec": "1min 30s",
        "TimeoutCleanUSec": "infinity",
        "TimeoutStartFailureMode": "terminate",
        "TimeoutStartUSec": "1min 30s",
        "TimeoutStopFailureMode": "terminate",
        "TimeoutStopUSec": "1min 30s",
        "TimerSlackNSec": "50000",
        "Transient": "no",
        "Type": "dbus",
        "UID": "[not set]",
        "UMask": "0022",
        "UnitFilePreset": "enabled",
        "UnitFileState": "enabled",
        "UtmpMode": "init",
        "WantedBy": "multi-user.target",
        "Wants": "network-pre.target",
        "WatchdogSignal": "6",
        "WatchdogTimestampMonotonic": "0",
        "WatchdogUSec": "0"
    }
}
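
Both firewalld tasks return the full systemd unit status; the unmask task is a no-op because the unit was never masked, and the enable/start task that follows likewise finds it already enabled and running. A sketch of the pair (the masked parameter on the first task is an assumption based on the task name; name, enabled, and state match the result fields):

- name: Unmask firewalld service
  ansible.builtin.systemd:
    name: firewalld
    masked: false   # assumed from the task name

- name: Enable and start firewalld service
  ansible.builtin.systemd:
    name: firewalld
    enabled: true
    state: started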

TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28
Monday 06 January 2025  13:44:45 -0500 (0:00:00.575)       0:02:16.404 ******** 
ok: [managed-node3] => {
    "changed": false,
    "enabled": true,
    "name": "firewalld",
    "state": "started",
    "status": {
        "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0",
        "ActiveEnterTimestamp": "Mon 2025-01-06 13:42:54 EST",
        "ActiveEnterTimestampMonotonic": "415937494",
        "ActiveExitTimestampMonotonic": "0",
        "ActiveState": "active",
        "After": "polkit.service basic.target dbus.socket system.slice sysinit.target dbus-broker.service",
        "AllowIsolate": "no",
        "AssertResult": "yes",
        "AssertTimestamp": "Mon 2025-01-06 13:42:54 EST",
        "AssertTimestampMonotonic": "415683410",
        "Before": "network-pre.target multi-user.target shutdown.target",
        "BlockIOAccounting": "no",
        "BlockIOWeight": "[not set]",
        "BusName": "org.fedoraproject.FirewallD1",
        "CPUAccounting": "yes",
        "CPUAffinityFromNUMA": "no",
        "CPUQuotaPerSecUSec": "infinity",
        "CPUQuotaPeriodUSec": "infinity",
        "CPUSchedulingPolicy": "0",
        "CPUSchedulingPriority": "0",
        "CPUSchedulingResetOnFork": "no",
        "CPUShares": "[not set]",
        "CPUUsageNSec": "410004000",
        "CPUWeight": "[not set]",
        "CacheDirectoryMode": "0755",
        "CanFreeze": "yes",
        "CanIsolate": "no",
        "CanReload": "yes",
        "CanStart": "yes",
        "CanStop": "yes",
        "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
        "CleanResult": "success",
        "CollectMode": "inactive",
        "ConditionResult": "yes",
        "ConditionTimestamp": "Mon 2025-01-06 13:42:54 EST",
        "ConditionTimestampMonotonic": "415683407",
        "ConfigurationDirectoryMode": "0755",
        "Conflicts": "ip6tables.service shutdown.target ebtables.service ipset.service iptables.service",
        "ControlGroup": "/system.slice/firewalld.service",
        "ControlGroupId": "4930",
        "ControlPID": "0",
        "CoredumpFilter": "0x33",
        "CoredumpReceive": "no",
        "DefaultDependencies": "yes",
        "DefaultMemoryLow": "0",
        "DefaultMemoryMin": "0",
        "DefaultStartupMemoryLow": "0",
        "Delegate": "no",
        "Description": "firewalld - dynamic firewall daemon",
        "DeviceAllow": "char-rtc r",
        "DevicePolicy": "closed",
        "Documentation": "\"man:firewalld(1)\"",
        "DynamicUser": "no",
        "EffectiveCPUs": "0-1",
        "EffectiveMemoryHigh": "3698229248",
        "EffectiveMemoryMax": "3698229248",
        "EffectiveMemoryNodes": "0",
        "EffectiveTasksMax": "22349",
        "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)",
        "ExecMainCode": "0",
        "ExecMainExitTimestampMonotonic": "0",
        "ExecMainHandoffTimestamp": "Mon 2025-01-06 13:42:54 EST",
        "ExecMainHandoffTimestampMonotonic": "415715983",
        "ExecMainPID": "27832",
        "ExecMainStartTimestamp": "Mon 2025-01-06 13:42:54 EST",
        "ExecMainStartTimestampMonotonic": "415686149",
        "ExecMainStatus": "0",
        "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExitType": "main",
        "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "FailureAction": "none",
        "FileDescriptorStoreMax": "0",
        "FileDescriptorStorePreserve": "restart",
        "FinalKillSignal": "9",
        "FragmentPath": "/usr/lib/systemd/system/firewalld.service",
        "FreezerState": "running",
        "GID": "[not set]",
        "GuessMainPID": "yes",
        "IOAccounting": "no",
        "IOReadBytes": "[not set]",
        "IOReadOperations": "[not set]",
        "IOSchedulingClass": "2",
        "IOSchedulingPriority": "4",
        "IOWeight": "[not set]",
        "IOWriteBytes": "[not set]",
        "IOWriteOperations": "[not set]",
        "IPAccounting": "no",
        "IPEgressBytes": "[no data]",
        "IPEgressPackets": "[no data]",
        "IPIngressBytes": "[no data]",
        "IPIngressPackets": "[no data]",
        "Id": "firewalld.service",
        "IgnoreOnIsolate": "no",
        "IgnoreSIGPIPE": "yes",
        "InactiveEnterTimestampMonotonic": "0",
        "InactiveExitTimestamp": "Mon 2025-01-06 13:42:54 EST",
        "InactiveExitTimestampMonotonic": "415686528",
        "InvocationID": "832872877ec346978863b0caa21bae35",
        "JobRunningTimeoutUSec": "infinity",
        "JobTimeoutAction": "none",
        "JobTimeoutUSec": "infinity",
        "KeyringMode": "private",
        "KillMode": "mixed",
        "KillSignal": "15",
        "LimitAS": "infinity",
        "LimitASSoft": "infinity",
        "LimitCORE": "infinity",
        "LimitCORESoft": "infinity",
        "LimitCPU": "infinity",
        "LimitCPUSoft": "infinity",
        "LimitDATA": "infinity",
        "LimitDATASoft": "infinity",
        "LimitFSIZE": "infinity",
        "LimitFSIZESoft": "infinity",
        "LimitLOCKS": "infinity",
        "LimitLOCKSSoft": "infinity",
        "LimitMEMLOCK": "8388608",
        "LimitMEMLOCKSoft": "8388608",
        "LimitMSGQUEUE": "819200",
        "LimitMSGQUEUESoft": "819200",
        "LimitNICE": "0",
        "LimitNICESoft": "0",
        "LimitNOFILE": "524288",
        "LimitNOFILESoft": "1024",
        "LimitNPROC": "13968",
        "LimitNPROCSoft": "13968",
        "LimitRSS": "infinity",
        "LimitRSSSoft": "infinity",
        "LimitRTPRIO": "0",
        "LimitRTPRIOSoft": "0",
        "LimitRTTIME": "infinity",
        "LimitRTTIMESoft": "infinity",
        "LimitSIGPENDING": "13968",
        "LimitSIGPENDINGSoft": "13968",
        "LimitSTACK": "infinity",
        "LimitSTACKSoft": "8388608",
        "LoadState": "loaded",
        "LockPersonality": "yes",
        "LogLevelMax": "-1",
        "LogRateLimitBurst": "0",
        "LogRateLimitIntervalUSec": "0",
        "LogsDirectoryMode": "0755",
        "MainPID": "27832",
        "ManagedOOMMemoryPressure": "auto",
        "ManagedOOMMemoryPressureLimit": "0",
        "ManagedOOMPreference": "none",
        "ManagedOOMSwap": "auto",
        "MemoryAccounting": "yes",
        "MemoryAvailable": "2593812480",
        "MemoryCurrent": "33964032",
        "MemoryDenyWriteExecute": "yes",
        "MemoryHigh": "infinity",
        "MemoryKSM": "no",
        "MemoryLimit": "infinity",
        "MemoryLow": "0",
        "MemoryMax": "infinity",
        "MemoryMin": "0",
        "MemoryPeak": "34234368",
        "MemoryPressureThresholdUSec": "200ms",
        "MemoryPressureWatch": "auto",
        "MemorySwapCurrent": "0",
        "MemorySwapMax": "infinity",
        "MemorySwapPeak": "0",
        "MemoryZSwapCurrent": "0",
        "MemoryZSwapMax": "infinity",
        "MemoryZSwapWriteback": "yes",
        "MountAPIVFS": "no",
        "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "NFileDescriptorStore": "0",
        "NRestarts": "0",
        "NUMAPolicy": "n/a",
        "Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service",
        "NeedDaemonReload": "no",
        "Nice": "0",
        "NoNewPrivileges": "no",
        "NonBlocking": "no",
        "NotifyAccess": "none",
        "OOMPolicy": "stop",
        "OOMScoreAdjust": "0",
        "OnFailureJobMode": "replace",
        "OnSuccessJobMode": "fail",
        "Perpetual": "no",
        "PrivateDevices": "yes",
        "PrivateIPC": "no",
        "PrivateMounts": "no",
        "PrivateNetwork": "no",
        "PrivateTmp": "no",
        "PrivateUsers": "no",
        "ProcSubset": "all",
        "ProtectClock": "yes",
        "ProtectControlGroups": "yes",
        "ProtectHome": "yes",
        "ProtectHostname": "yes",
        "ProtectKernelLogs": "yes",
        "ProtectKernelModules": "no",
        "ProtectKernelTunables": "no",
        "ProtectProc": "default",
        "ProtectSystem": "yes",
        "RefuseManualStart": "no",
        "RefuseManualStop": "no",
        "ReloadResult": "success",
        "ReloadSignal": "1",
        "RemainAfterExit": "no",
        "RemoveIPC": "no",
        "Requires": "dbus.socket system.slice sysinit.target",
        "Restart": "no",
        "RestartKillSignal": "15",
        "RestartMaxDelayUSec": "infinity",
        "RestartMode": "normal",
        "RestartSteps": "0",
        "RestartUSec": "100ms",
        "RestartUSecNext": "100ms",
        "RestrictNamespaces": "no",
        "RestrictRealtime": "yes",
        "RestrictSUIDSGID": "yes",
        "Result": "success",
        "RootDirectoryStartOnly": "no",
        "RootEphemeral": "no",
        "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "RuntimeDirectoryMode": "0755",
        "RuntimeDirectoryPreserve": "no",
        "RuntimeMaxUSec": "infinity",
        "RuntimeRandomizedExtraUSec": "0",
        "SameProcessGroup": "no",
        "SecureBits": "0",
        "SendSIGHUP": "no",
        "SendSIGKILL": "yes",
        "SetLoginEnvironment": "no",
        "Slice": "system.slice",
        "StandardError": "null",
        "StandardInput": "null",
        "StandardOutput": "null",
        "StartLimitAction": "none",
        "StartLimitBurst": "5",
        "StartLimitIntervalUSec": "10s",
        "StartupBlockIOWeight": "[not set]",
        "StartupCPUShares": "[not set]",
        "StartupCPUWeight": "[not set]",
        "StartupIOWeight": "[not set]",
        "StartupMemoryHigh": "infinity",
        "StartupMemoryLow": "0",
        "StartupMemoryMax": "infinity",
        "StartupMemorySwapMax": "infinity",
        "StartupMemoryZSwapMax": "infinity",
        "StateChangeTimestamp": "Mon 2025-01-06 13:43:59 EST",
        "StateChangeTimestampMonotonic": "480606547",
        "StateDirectoryMode": "0755",
        "StatusErrno": "0",
        "StopWhenUnneeded": "no",
        "SubState": "running",
        "SuccessAction": "none",
        "SurviveFinalKillSignal": "no",
        "SyslogFacility": "3",
        "SyslogLevel": "6",
        "SyslogLevelPrefix": "yes",
        "SyslogPriority": "30",
        "SystemCallArchitectures": "native",
        "SystemCallErrorNumber": "2147483646",
        "TTYReset": "no",
        "TTYVHangup": "no",
        "TTYVTDisallocate": "no",
        "TasksAccounting": "yes",
        "TasksCurrent": "2",
        "TasksMax": "22349",
        "TimeoutAbortUSec": "1min 30s",
        "TimeoutCleanUSec": "infinity",
        "TimeoutStartFailureMode": "terminate",
        "TimeoutStartUSec": "1min 30s",
        "TimeoutStopFailureMode": "terminate",
        "TimeoutStopUSec": "1min 30s",
        "TimerSlackNSec": "50000",
        "Transient": "no",
        "Type": "dbus",
        "UID": "[not set]",
        "UMask": "0022",
        "UnitFilePreset": "enabled",
        "UnitFileState": "enabled",
        "UtmpMode": "init",
        "WantedBy": "multi-user.target",
        "Wants": "network-pre.target",
        "WatchdogSignal": "6",
        "WatchdogTimestampMonotonic": "0",
        "WatchdogUSec": "0"
    }
}
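
The property dump above is the "status" dictionary that systemd-based service tasks return. A minimal sketch of a task that yields this kind of result, assuming a plain ansible.builtin.systemd_service call (the firewall role's actual task may differ):

    # Hypothetical illustration only.
    - name: Ensure firewalld is running and enabled
      ansible.builtin.systemd_service:
        name: firewalld
        state: started
        enabled: true
      register: firewalld_result   # firewalld_result.status carries ActiveState, MainPID, etc.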

TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:34
Monday 06 January 2025  13:44:46 -0500 (0:00:00.609)       0:02:17.013 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__firewall_previous_replaced": false,
        "__firewall_python_cmd": "/usr/bin/python3.12",
        "__firewall_report_changed": true
    },
    "changed": false
}

TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:43
Monday 06 January 2025  13:44:46 -0500 (0:00:00.130)       0:02:17.143 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__firewall_previous_replaced | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:55
Monday 06 January 2025  13:44:46 -0500 (0:00:00.089)       0:02:17.232 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__firewall_previous_replaced | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Configure firewall] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71
Monday 06 January 2025  13:44:46 -0500 (0:00:00.084)       0:02:17.317 ******** 
ok: [managed-node3] => (item={'port': '8000/tcp', 'state': 'enabled'}) => {
    "__firewall_changed": false,
    "ansible_loop_var": "item",
    "changed": false,
    "item": {
        "port": "8000/tcp",
        "state": "enabled"
    }
}
ok: [managed-node3] => (item={'port': '9000/tcp', 'state': 'enabled'}) => {
    "__firewall_changed": false,
    "ansible_loop_var": "item",
    "changed": false,
    "item": {
        "port": "9000/tcp",
        "state": "enabled"
    }
}
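
Both loop items report "__firewall_changed": false, so ports 8000/tcp and 9000/tcp were already open. A minimal sketch of input that drives this task, assuming the firewall role's list-form "firewall" variable (the same variable tested by the skip conditions below); in this test run the ports are supplied by the calling role rather than set directly like this:

    # Hypothetical illustration only.
    - name: Open the demo ports
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.firewall
      vars:
        firewall:
          - port: 8000/tcp
            state: enabled
          - port: 9000/tcp
            state: enabled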

TASK [fedora.linux_system_roles.firewall : Gather firewall config information] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:120
Monday 06 January 2025  13:44:47 -0500 (0:00:01.075)       0:02:18.393 ******** 
skipping: [managed-node3] => (item={'port': '8000/tcp', 'state': 'enabled'})  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "firewall | length == 1",
    "item": {
        "port": "8000/tcp",
        "state": "enabled"
    },
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item={'port': '9000/tcp', 'state': 'enabled'})  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "firewall | length == 1",
    "item": {
        "port": "9000/tcp",
        "state": "enabled"
    },
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] *******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:130
Monday 06 January 2025  13:44:47 -0500 (0:00:00.190)       0:02:18.583 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "firewall | length == 1",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:139
Monday 06 January 2025  13:44:47 -0500 (0:00:00.063)       0:02:18.647 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "firewall == None or firewall | length == 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] *******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:144
Monday 06 January 2025  13:44:48 -0500 (0:00:00.052)       0:02:18.699 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "firewall == None or firewall | length == 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:153
Monday 06 January 2025  13:44:48 -0500 (0:00:00.084)       0:02:18.784 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__firewall_previous_replaced | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Calculate what has changed] *********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:163
Monday 06 January 2025  13:44:48 -0500 (0:00:00.101)       0:02:18.885 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__firewall_previous_replaced | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Show diffs] *************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:169
Monday 06 January 2025  13:44:48 -0500 (0:00:00.091)       0:02:18.977 ******** 
skipping: [managed-node3] => {
    "false_condition": "__firewall_previous_replaced | bool"
}

TASK [Manage selinux for specified ports] **************************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146
Monday 06 January 2025  13:44:48 -0500 (0:00:00.140)       0:02:19.117 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "podman_selinux_ports | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153
Monday 06 January 2025  13:44:48 -0500 (0:00:00.103)       0:02:19.220 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_cancel_user_linger": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] *******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157
Monday 06 January 2025  13:44:48 -0500 (0:00:00.056)       0:02:19.277 ******** 
skipping: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle credential files - present] ****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166
Monday 06 January 2025  13:44:48 -0500 (0:00:00.045)       0:02:19.322 ******** 
skipping: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle secrets] ***********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175
Monday 06 January 2025  13:44:48 -0500 (0:00:00.046)       0:02:19.369 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node3 => (item=(censored due to no_log))
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node3 => (item=(censored due to no_log))
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node3 => (item=(censored due to no_log))

TASK [fedora.linux_system_roles.podman : Set variables part 1] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3
Monday 06 January 2025  13:44:48 -0500 (0:00:00.237)       0:02:19.607 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_user": "root"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7
Monday 06 January 2025  13:44:49 -0500 (0:00:00.104)       0:02:19.711 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 06 January 2025  13:44:49 -0500 (0:00:00.127)       0:02:19.839 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 06 January 2025  13:44:49 -0500 (0:00:00.070)       0:02:19.909 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Debug] ********************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 06 January 2025  13:44:49 -0500 (0:00:00.055)       0:02:19.965 ******** 
ok: [managed-node3] => {}

MSG:

item {
    "name": "mysql-root-password-container",
    "state": "absent"
}

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:20
Monday 06 January 2025  13:44:49 -0500 (0:00:00.062)       0:02:20.028 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_group": "0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35
Monday 06 January 2025  13:44:49 -0500 (0:00:00.069)       0:02:20.098 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:46
Monday 06 January 2025  13:44:49 -0500 (0:00:00.051)       0:02:20.149 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:51
Monday 06 January 2025  13:44:49 -0500 (0:00:00.117)       0:02:20.267 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:56
Monday 06 January 2025  13:44:49 -0500 (0:00:00.034)       0:02:20.301 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:69
Monday 06 January 2025  13:44:49 -0500 (0:00:00.037)       0:02:20.339 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74
Monday 06 January 2025  13:44:49 -0500 (0:00:00.037)       0:02:20.376 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79
Monday 06 January 2025  13:44:49 -0500 (0:00:00.032)       0:02:20.409 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Monday 06 January 2025  13:44:49 -0500 (0:00:00.037)       0:02:20.446 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:96
Monday 06 January 2025  13:44:49 -0500 (0:00:00.035)       0:02:20.482 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set variables part 2] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:13
Monday 06 January 2025  13:44:49 -0500 (0:00:00.031)       0:02:20.513 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_rootless": false,
        "__podman_xdg_runtime_dir": "/run/user/0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:19
Monday 06 January 2025  13:44:49 -0500 (0:00:00.039)       0:02:20.552 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Monday 06 January 2025  13:44:49 -0500 (0:00:00.106)       0:02:20.659 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Monday 06 January 2025  13:44:50 -0500 (0:00:00.057)       0:02:20.716 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Monday 06 January 2025  13:44:50 -0500 (0:00:00.061)       0:02:20.777 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:24
Monday 06 January 2025  13:44:50 -0500 (0:00:00.083)       0:02:20.861 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Manage each secret] *******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:40
Monday 06 January 2025  13:44:50 -0500 (0:00:00.104)       0:02:20.966 ******** 
changed: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": true
}

TASK [fedora.linux_system_roles.podman : Set variables part 1] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3
Monday 06 January 2025  13:44:50 -0500 (0:00:00.571)       0:02:21.537 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_user": "root"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7
Monday 06 January 2025  13:44:50 -0500 (0:00:00.071)       0:02:21.609 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 06 January 2025  13:44:51 -0500 (0:00:00.181)       0:02:21.791 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 06 January 2025  13:44:51 -0500 (0:00:00.098)       0:02:21.889 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Debug] ********************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 06 January 2025  13:44:51 -0500 (0:00:00.065)       0:02:21.954 ******** 
ok: [managed-node3] => {}

MSG:

item {
    "name": "mysql-root-password-kube",
    "state": "absent"
}

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:20
Monday 06 January 2025  13:44:51 -0500 (0:00:00.143)       0:02:22.098 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_group": "0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35
Monday 06 January 2025  13:44:51 -0500 (0:00:00.046)       0:02:22.145 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:46
Monday 06 January 2025  13:44:51 -0500 (0:00:00.038)       0:02:22.183 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:51
Monday 06 January 2025  13:44:51 -0500 (0:00:00.039)       0:02:22.223 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:56
Monday 06 January 2025  13:44:51 -0500 (0:00:00.035)       0:02:22.259 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:69
Monday 06 January 2025  13:44:51 -0500 (0:00:00.032)       0:02:22.291 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74
Monday 06 January 2025  13:44:51 -0500 (0:00:00.033)       0:02:22.325 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79
Monday 06 January 2025  13:44:51 -0500 (0:00:00.031)       0:02:22.357 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Monday 06 January 2025  13:44:51 -0500 (0:00:00.037)       0:02:22.394 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:96
Monday 06 January 2025  13:44:51 -0500 (0:00:00.039)       0:02:22.434 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set variables part 2] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:13
Monday 06 January 2025  13:44:51 -0500 (0:00:00.051)       0:02:22.486 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_rootless": false,
        "__podman_xdg_runtime_dir": "/run/user/0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:19
Monday 06 January 2025  13:44:51 -0500 (0:00:00.058)       0:02:22.544 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Monday 06 January 2025  13:44:51 -0500 (0:00:00.097)       0:02:22.642 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Monday 06 January 2025  13:44:52 -0500 (0:00:00.061)       0:02:22.704 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Monday 06 January 2025  13:44:52 -0500 (0:00:00.044)       0:02:22.749 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:24
Monday 06 January 2025  13:44:52 -0500 (0:00:00.037)       0:02:22.787 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Manage each secret] *******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:40
Monday 06 January 2025  13:44:52 -0500 (0:00:00.030)       0:02:22.817 ******** 
changed: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": true
}

TASK [fedora.linux_system_roles.podman : Set variables part 1] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3
Monday 06 January 2025  13:44:52 -0500 (0:00:00.426)       0:02:23.243 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_user": "root"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7
Monday 06 January 2025  13:44:52 -0500 (0:00:00.059)       0:02:23.303 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 06 January 2025  13:44:52 -0500 (0:00:00.083)       0:02:23.387 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 06 January 2025  13:44:52 -0500 (0:00:00.100)       0:02:23.488 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Debug] ********************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 06 January 2025  13:44:52 -0500 (0:00:00.053)       0:02:23.542 ******** 
ok: [managed-node3] => {}

MSG:

item {
    "name": "envoy-certificates",
    "state": "absent"
}

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:20
Monday 06 January 2025  13:44:52 -0500 (0:00:00.041)       0:02:23.583 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_group": "0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35
Monday 06 January 2025  13:44:52 -0500 (0:00:00.059)       0:02:23.643 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:46
Monday 06 January 2025  13:44:53 -0500 (0:00:00.062)       0:02:23.705 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:51
Monday 06 January 2025  13:44:53 -0500 (0:00:00.063)       0:02:23.769 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:56
Monday 06 January 2025  13:44:53 -0500 (0:00:00.053)       0:02:23.822 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:69
Monday 06 January 2025  13:44:53 -0500 (0:00:00.038)       0:02:23.861 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74
Monday 06 January 2025  13:44:53 -0500 (0:00:00.037)       0:02:23.899 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79
Monday 06 January 2025  13:44:53 -0500 (0:00:00.031)       0:02:23.930 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Monday 06 January 2025  13:44:53 -0500 (0:00:00.033)       0:02:23.964 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:96
Monday 06 January 2025  13:44:53 -0500 (0:00:00.035)       0:02:24.000 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_check_subids | d(true)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set variables part 2] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:13
Monday 06 January 2025  13:44:53 -0500 (0:00:00.045)       0:02:24.045 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_rootless": false,
        "__podman_xdg_runtime_dir": "/run/user/0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:19
Monday 06 January 2025  13:44:53 -0500 (0:00:00.084)       0:02:24.130 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Monday 06 January 2025  13:44:53 -0500 (0:00:00.118)       0:02:24.248 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Monday 06 January 2025  13:44:53 -0500 (0:00:00.049)       0:02:24.298 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Monday 06 January 2025  13:44:53 -0500 (0:00:00.041)       0:02:24.340 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:24
Monday 06 January 2025  13:44:53 -0500 (0:00:00.045)       0:02:24.385 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Manage each secret] *******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:40
Monday 06 January 2025  13:44:53 -0500 (0:00:00.052)       0:02:24.438 ******** 
changed: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": true
}
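
Each of the three handle_secret.yml passes above ends in a changed "Manage each secret" task, and the preceding Debug tasks show which secret each pass removes: mysql-root-password-container, mysql-root-password-kube, and envoy-certificates, all with state "absent". A minimal sketch of the role input behind this cleanup, assuming the podman role's "podman_secrets" list variable (names taken from the Debug output; the actual values are censored by no_log in this run):

    # Hypothetical illustration only.
    podman_secrets:
      - name: mysql-root-password-container
        state: absent
      - name: mysql-root-password-kube
        state: absent
      - name: envoy-certificates
        state: absent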

TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182
Monday 06 January 2025  13:44:54 -0500 (0:00:00.470)       0:02:24.908 ******** 
skipping: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189
Monday 06 January 2025  13:44:54 -0500 (0:00:00.045)       0:02:24.954 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log))
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log))
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log))
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log))
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log))
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log))

TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Monday 06 January 2025  13:44:54 -0500 (0:00:00.366)       0:02:25.321 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_file_src": "quadlet-demo.kube",
        "__podman_quadlet_spec": {},
        "__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Unit]\nRequires=quadlet-demo-mysql.service\nAfter=quadlet-demo-mysql.service\n\n[Kube]\n# Point to the yaml file in the same directory\nYaml=quadlet-demo.yml\n# Use the quadlet-demo network\nNetwork=quadlet-demo.network\n# Publish the envoy proxy data port\nPublishPort=8000:8080\n# Publish the envoy proxy admin port\nPublishPort=9000:9901\n# Use the envoy proxy config map in the same directory\nConfigMap=envoy-proxy-configmap.yml",
        "__podman_quadlet_template_src": ""
    },
    "changed": false
}
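
For readability, __podman_quadlet_str above is the quadlet-demo.kube unit content with its escaped newlines expanded; nothing is added or changed:

    [Install]
    WantedBy=default.target

    [Unit]
    Requires=quadlet-demo-mysql.service
    After=quadlet-demo-mysql.service

    [Kube]
    # Point to the yaml file in the same directory
    Yaml=quadlet-demo.yml
    # Use the quadlet-demo network
    Network=quadlet-demo.network
    # Publish the envoy proxy data port
    PublishPort=8000:8080
    # Publish the envoy proxy admin port
    PublishPort=9000:9901
    # Use the envoy proxy config map in the same directory
    ConfigMap=envoy-proxy-configmap.yml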

TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Monday 06 January 2025  13:44:54 -0500 (0:00:00.135)       0:02:25.457 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_continue_if_pull_fails": false,
        "__podman_pull_image": true,
        "__podman_state": "absent",
        "__podman_systemd_unit_scope": "",
        "__podman_user": "root"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Monday 06 January 2025  13:44:54 -0500 (0:00:00.076)       0:02:25.533 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_quadlet_file_src",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Monday 06 January 2025  13:44:54 -0500 (0:00:00.098)       0:02:25.632 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_name": "quadlet-demo",
        "__podman_quadlet_type": "kube",
        "__podman_rootless": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Monday 06 January 2025  13:44:55 -0500 (0:00:00.075)       0:02:25.708 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 06 January 2025  13:44:55 -0500 (0:00:00.157)       0:02:25.865 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 06 January 2025  13:44:55 -0500 (0:00:00.098)       0:02:25.964 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Debug] ********************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 06 January 2025  13:44:55 -0500 (0:00:00.109)       0:02:26.073 ******** 
ok: [managed-node3] => {}

MSG:

item {
    "file_src": "quadlet-demo.kube",
    "state": "absent"
}

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:20
Monday 06 January 2025  13:44:55 -0500 (0:00:00.086)       0:02:26.160 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_group": "0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35
Monday 06 January 2025  13:44:55 -0500 (0:00:00.111)       0:02:26.271 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1736188868.5129364,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 32,
        "charset": "binary",
        "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97",
        "ctime": 1736188848.6497798,
        "dev": 51714,
        "device_type": 0,
        "executable": true,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 9125782,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "application/x-pie-executable",
        "mode": "0755",
        "mtime": 1730678400.0,
        "nlink": 1,
        "path": "/usr/bin/getsubids",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 15744,
        "uid": 0,
        "version": "4070602005",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": true,
        "xoth": true,
        "xusr": true
    }
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:46
Monday 06 January 2025  13:44:55 -0500 (0:00:00.416)       0:02:26.687 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:51
Monday 06 January 2025  13:44:56 -0500 (0:00:00.053)       0:02:26.741 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:56
Monday 06 January 2025  13:44:56 -0500 (0:00:00.055)       0:02:26.796 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:69
Monday 06 January 2025  13:44:56 -0500 (0:00:00.051)       0:02:26.848 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74
Monday 06 January 2025  13:44:56 -0500 (0:00:00.105)       0:02:26.953 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79
Monday 06 January 2025  13:44:56 -0500 (0:00:00.051)       0:02:27.005 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Monday 06 January 2025  13:44:56 -0500 (0:00:00.090)       0:02:27.095 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:96
Monday 06 January 2025  13:44:56 -0500 (0:00:00.091)       0:02:27.186 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Monday 06 January 2025  13:44:56 -0500 (0:00:00.052)       0:02:27.239 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_activate_systemd_unit": true,
        "__podman_images_found": [],
        "__podman_kube_yamls_raw": [
            "quadlet-demo.yml"
        ],
        "__podman_service_name": "quadlet-demo.service",
        "__podman_systemd_scope": "system",
        "__podman_user_home_dir": "/root",
        "__podman_xdg_runtime_dir": "/run/user/0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Monday 06 January 2025  13:44:56 -0500 (0:00:00.301)       0:02:27.540 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_path": "/etc/containers/systemd"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Monday 06 January 2025  13:44:56 -0500 (0:00:00.073)       0:02:27.614 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_state != \"absent\"",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Monday 06 January 2025  13:44:56 -0500 (0:00:00.060)       0:02:27.674 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_images": [],
        "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.kube",
        "__podman_volumes": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Monday 06 January 2025  13:44:57 -0500 (0:00:00.181)       0:02:27.855 ******** 
ok: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Monday 06 January 2025  13:44:57 -0500 (0:00:00.121)       0:02:27.977 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4
Monday 06 January 2025  13:44:57 -0500 (0:00:00.158)       0:02:28.135 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Stop and disable service] *************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
Monday 06 January 2025  13:44:57 -0500 (0:00:00.067)       0:02:28.203 ******** 
changed: [managed-node3] => {
    "changed": true,
    "enabled": false,
    "failed_when_result": false,
    "name": "quadlet-demo.service",
    "state": "stopped",
    "status": {
        "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0",
        "ActiveEnterTimestamp": "Mon 2025-01-06 13:44:00 EST",
        "ActiveEnterTimestampMonotonic": "482165296",
        "ActiveExitTimestampMonotonic": "0",
        "ActiveState": "active",
        "After": "network-online.target quadlet-demo-network.service sysinit.target -.mount quadlet-demo-mysql.service system.slice basic.target systemd-journald.socket",
        "AllowIsolate": "no",
        "AssertResult": "yes",
        "AssertTimestamp": "Mon 2025-01-06 13:44:00 EST",
        "AssertTimestampMonotonic": "481355738",
        "Before": "shutdown.target multi-user.target",
        "BlockIOAccounting": "no",
        "BlockIOWeight": "[not set]",
        "CPUAccounting": "yes",
        "CPUAffinityFromNUMA": "no",
        "CPUQuotaPerSecUSec": "infinity",
        "CPUQuotaPeriodUSec": "infinity",
        "CPUSchedulingPolicy": "0",
        "CPUSchedulingPriority": "0",
        "CPUSchedulingResetOnFork": "no",
        "CPUShares": "[not set]",
        "CPUUsageNSec": "470455000",
        "CPUWeight": "[not set]",
        "CacheDirectoryMode": "0755",
        "CanFreeze": "yes",
        "CanIsolate": "no",
        "CanReload": "no",
        "CanStart": "yes",
        "CanStop": "yes",
        "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
        "CleanResult": "success",
        "CollectMode": "inactive",
        "ConditionResult": "yes",
        "ConditionTimestamp": "Mon 2025-01-06 13:44:00 EST",
        "ConditionTimestampMonotonic": "481355735",
        "ConfigurationDirectoryMode": "0755",
        "Conflicts": "shutdown.target",
        "ControlGroup": "/system.slice/quadlet-demo.service",
        "ControlGroupId": "6892",
        "ControlPID": "0",
        "CoredumpFilter": "0x33",
        "CoredumpReceive": "no",
        "DefaultDependencies": "yes",
        "DefaultMemoryLow": "0",
        "DefaultMemoryMin": "0",
        "DefaultStartupMemoryLow": "0",
        "Delegate": "no",
        "Description": "quadlet-demo.service",
        "DevicePolicy": "auto",
        "DynamicUser": "no",
        "EffectiveCPUs": "0-1",
        "EffectiveMemoryHigh": "3698229248",
        "EffectiveMemoryMax": "3698229248",
        "EffectiveMemoryNodes": "0",
        "EffectiveTasksMax": "22349",
        "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo.service",
        "ExecMainCode": "0",
        "ExecMainExitTimestampMonotonic": "0",
        "ExecMainHandoffTimestampMonotonic": "0",
        "ExecMainPID": "35129",
        "ExecMainStartTimestamp": "Mon 2025-01-06 13:44:00 EST",
        "ExecMainStartTimestampMonotonic": "482165258",
        "ExecMainStatus": "0",
        "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[Mon 2025-01-06 13:44:00 EST] ; stop_time=[n/a] ; pid=35074 ; code=(null) ; status=0/0 }",
        "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; flags= ; start_time=[Mon 2025-01-06 13:44:00 EST] ; stop_time=[n/a] ; pid=35074 ; code=(null) ; status=0/0 }",
        "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExitType": "main",
        "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "FailureAction": "none",
        "FileDescriptorStoreMax": "0",
        "FileDescriptorStorePreserve": "restart",
        "FinalKillSignal": "9",
        "FragmentPath": "/run/systemd/generator/quadlet-demo.service",
        "FreezerState": "running",
        "GID": "[not set]",
        "GuessMainPID": "yes",
        "IOAccounting": "no",
        "IOReadBytes": "[not set]",
        "IOReadOperations": "[not set]",
        "IOSchedulingClass": "2",
        "IOSchedulingPriority": "4",
        "IOWeight": "[not set]",
        "IOWriteBytes": "[not set]",
        "IOWriteOperations": "[not set]",
        "IPAccounting": "no",
        "IPEgressBytes": "[no data]",
        "IPEgressPackets": "[no data]",
        "IPIngressBytes": "[no data]",
        "IPIngressPackets": "[no data]",
        "Id": "quadlet-demo.service",
        "IgnoreOnIsolate": "no",
        "IgnoreSIGPIPE": "yes",
        "InactiveEnterTimestampMonotonic": "0",
        "InactiveExitTimestamp": "Mon 2025-01-06 13:44:00 EST",
        "InactiveExitTimestampMonotonic": "481357604",
        "InvocationID": "4b8a4272e9e0403c95aef7c8540edd7a",
        "JobRunningTimeoutUSec": "infinity",
        "JobTimeoutAction": "none",
        "JobTimeoutUSec": "infinity",
        "KeyringMode": "private",
        "KillMode": "mixed",
        "KillSignal": "15",
        "LimitAS": "infinity",
        "LimitASSoft": "infinity",
        "LimitCORE": "infinity",
        "LimitCORESoft": "infinity",
        "LimitCPU": "infinity",
        "LimitCPUSoft": "infinity",
        "LimitDATA": "infinity",
        "LimitDATASoft": "infinity",
        "LimitFSIZE": "infinity",
        "LimitFSIZESoft": "infinity",
        "LimitLOCKS": "infinity",
        "LimitLOCKSSoft": "infinity",
        "LimitMEMLOCK": "8388608",
        "LimitMEMLOCKSoft": "8388608",
        "LimitMSGQUEUE": "819200",
        "LimitMSGQUEUESoft": "819200",
        "LimitNICE": "0",
        "LimitNICESoft": "0",
        "LimitNOFILE": "524288",
        "LimitNOFILESoft": "1024",
        "LimitNPROC": "13968",
        "LimitNPROCSoft": "13968",
        "LimitRSS": "infinity",
        "LimitRSSSoft": "infinity",
        "LimitRTPRIO": "0",
        "LimitRTPRIOSoft": "0",
        "LimitRTTIME": "infinity",
        "LimitRTTIMESoft": "infinity",
        "LimitSIGPENDING": "13968",
        "LimitSIGPENDINGSoft": "13968",
        "LimitSTACK": "infinity",
        "LimitSTACKSoft": "8388608",
        "LoadState": "loaded",
        "LockPersonality": "no",
        "LogLevelMax": "-1",
        "LogRateLimitBurst": "0",
        "LogRateLimitIntervalUSec": "0",
        "LogsDirectoryMode": "0755",
        "MainPID": "35129",
        "ManagedOOMMemoryPressure": "auto",
        "ManagedOOMMemoryPressureLimit": "0",
        "ManagedOOMPreference": "none",
        "ManagedOOMSwap": "auto",
        "MemoryAccounting": "yes",
        "MemoryAvailable": "2600509440",
        "MemoryCurrent": "3768320",
        "MemoryDenyWriteExecute": "no",
        "MemoryHigh": "infinity",
        "MemoryKSM": "no",
        "MemoryLimit": "infinity",
        "MemoryLow": "0",
        "MemoryMax": "infinity",
        "MemoryMin": "0",
        "MemoryPeak": "35844096",
        "MemoryPressureThresholdUSec": "200ms",
        "MemoryPressureWatch": "auto",
        "MemorySwapCurrent": "0",
        "MemorySwapMax": "infinity",
        "MemorySwapPeak": "0",
        "MemoryZSwapCurrent": "0",
        "MemoryZSwapMax": "infinity",
        "MemoryZSwapWriteback": "yes",
        "MountAPIVFS": "no",
        "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "NFileDescriptorStore": "0",
        "NRestarts": "0",
        "NUMAPolicy": "n/a",
        "Names": "quadlet-demo.service",
        "NeedDaemonReload": "no",
        "Nice": "0",
        "NoNewPrivileges": "no",
        "NonBlocking": "no",
        "NotifyAccess": "all",
        "OOMPolicy": "stop",
        "OOMScoreAdjust": "0",
        "OnFailureJobMode": "replace",
        "OnSuccessJobMode": "fail",
        "Perpetual": "no",
        "PrivateDevices": "no",
        "PrivateIPC": "no",
        "PrivateMounts": "no",
        "PrivateNetwork": "no",
        "PrivateTmp": "no",
        "PrivateUsers": "no",
        "ProcSubset": "all",
        "ProtectClock": "no",
        "ProtectControlGroups": "no",
        "ProtectHome": "no",
        "ProtectHostname": "no",
        "ProtectKernelLogs": "no",
        "ProtectKernelModules": "no",
        "ProtectKernelTunables": "no",
        "ProtectProc": "default",
        "ProtectSystem": "no",
        "RefuseManualStart": "no",
        "RefuseManualStop": "no",
        "ReloadResult": "success",
        "ReloadSignal": "1",
        "RemainAfterExit": "no",
        "RemoveIPC": "no",
        "Requires": "-.mount system.slice quadlet-demo-mysql.service quadlet-demo-network.service sysinit.target",
        "RequiresMountsFor": "/run/containers",
        "Restart": "no",
        "RestartKillSignal": "15",
        "RestartMaxDelayUSec": "infinity",
        "RestartMode": "normal",
        "RestartSteps": "0",
        "RestartUSec": "100ms",
        "RestartUSecNext": "100ms",
        "RestrictNamespaces": "no",
        "RestrictRealtime": "no",
        "RestrictSUIDSGID": "no",
        "Result": "success",
        "RootDirectoryStartOnly": "no",
        "RootEphemeral": "no",
        "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "RuntimeDirectoryMode": "0755",
        "RuntimeDirectoryPreserve": "no",
        "RuntimeMaxUSec": "infinity",
        "RuntimeRandomizedExtraUSec": "0",
        "SameProcessGroup": "no",
        "SecureBits": "0",
        "SendSIGHUP": "no",
        "SendSIGKILL": "yes",
        "SetLoginEnvironment": "no",
        "Slice": "system.slice",
        "SourcePath": "/etc/containers/systemd/quadlet-demo.kube",
        "StandardError": "inherit",
        "StandardInput": "null",
        "StandardOutput": "journal",
        "StartLimitAction": "none",
        "StartLimitBurst": "5",
        "StartLimitIntervalUSec": "10s",
        "StartupBlockIOWeight": "[not set]",
        "StartupCPUShares": "[not set]",
        "StartupCPUWeight": "[not set]",
        "StartupIOWeight": "[not set]",
        "StartupMemoryHigh": "infinity",
        "StartupMemoryLow": "0",
        "StartupMemoryMax": "infinity",
        "StartupMemorySwapMax": "infinity",
        "StartupMemoryZSwapMax": "infinity",
        "StateChangeTimestamp": "Mon 2025-01-06 13:44:00 EST",
        "StateChangeTimestampMonotonic": "482165296",
        "StateDirectoryMode": "0755",
        "StatusErrno": "0",
        "StopWhenUnneeded": "no",
        "SubState": "running",
        "SuccessAction": "none",
        "SurviveFinalKillSignal": "no",
        "SyslogFacility": "3",
        "SyslogIdentifier": "quadlet-demo",
        "SyslogLevel": "6",
        "SyslogLevelPrefix": "yes",
        "SyslogPriority": "30",
        "SystemCallErrorNumber": "2147483646",
        "TTYReset": "no",
        "TTYVHangup": "no",
        "TTYVTDisallocate": "no",
        "TasksAccounting": "yes",
        "TasksCurrent": "4",
        "TasksMax": "22349",
        "TimeoutAbortUSec": "1min 30s",
        "TimeoutCleanUSec": "infinity",
        "TimeoutStartFailureMode": "terminate",
        "TimeoutStartUSec": "1min 30s",
        "TimeoutStopFailureMode": "terminate",
        "TimeoutStopUSec": "1min 30s",
        "TimerSlackNSec": "50000",
        "Transient": "no",
        "Type": "notify",
        "UID": "[not set]",
        "UMask": "0022",
        "UnitFilePreset": "disabled",
        "UnitFileState": "generated",
        "UtmpMode": "init",
        "WantedBy": "multi-user.target",
        "Wants": "network-online.target",
        "WatchdogSignal": "6",
        "WatchdogTimestampMonotonic": "0",
        "WatchdogUSec": "0"
    }
}

TASK [fedora.linux_system_roles.podman : See if quadlet file exists] ***********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33
Monday 06 January 2025  13:44:58 -0500 (0:00:01.402)       0:02:29.605 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1736189038.575518,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7",
        "ctime": 1736189038.581518,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 155498479,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1736189038.3035161,
        "nlink": 1,
        "path": "/etc/containers/systemd/quadlet-demo.kube",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 456,
        "uid": 0,
        "version": "344903448",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38
Monday 06 January 2025  13:44:59 -0500 (0:00:00.463)       0:02:30.068 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Slurp quadlet file] *******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6
Monday 06 January 2025  13:44:59 -0500 (0:00:00.102)       0:02:30.171 ******** 
ok: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12
Monday 06 January 2025  13:44:59 -0500 (0:00:00.421)       0:02:30.592 ******** 
ok: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44
Monday 06 January 2025  13:44:59 -0500 (0:00:00.095)       0:02:30.688 ******** 
skipping: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Reset raw variable] *******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52
Monday 06 January 2025  13:45:00 -0500 (0:00:00.076)       0:02:30.765 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_raw": null
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Remove quadlet file] ******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42
Monday 06 January 2025  13:45:00 -0500 (0:00:00.104)       0:02:30.870 ******** 
changed: [managed-node3] => {
    "changed": true,
    "path": "/etc/containers/systemd/quadlet-demo.kube",
    "state": "absent"
}

TASK [fedora.linux_system_roles.podman : Refresh systemd] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48
Monday 06 January 2025  13:45:00 -0500 (0:00:00.480)       0:02:31.350 ******** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.podman : Remove managed resource] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58
Monday 06 January 2025  13:45:01 -0500 (0:00:00.819)       0:02:32.170 ******** 
skipping: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Remove volumes] ***********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99
Monday 06 January 2025  13:45:01 -0500 (0:00:00.106)       0:02:32.276 ******** 
skipping: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] *********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116
Monday 06 January 2025  13:45:01 -0500 (0:00:00.096)       0:02:32.372 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_parsed": null
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120
Monday 06 January 2025  13:45:01 -0500 (0:00:00.294)       0:02:32.666 ******** 
changed: [managed-node3] => {
    "changed": true,
    "cmd": [
        "podman",
        "image",
        "prune",
        "--all",
        "-f"
    ],
    "delta": "0:00:00.700039",
    "end": "2025-01-06 13:45:03.036392",
    "rc": 0,
    "start": "2025-01-06 13:45:02.336353"
}

STDOUT:

fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b
5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d
686eb9c9ce6c0454f7e2fa69c05872ef40a18e824a8fdf257567470dc5641f72

TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131
Monday 06 January 2025  13:45:03 -0500 (0:00:01.190)       0:02:33.857 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Monday 06 January 2025  13:45:03 -0500 (0:00:00.080)       0:02:33.937 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Monday 06 January 2025  13:45:03 -0500 (0:00:00.037)       0:02:33.974 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Monday 06 January 2025  13:45:03 -0500 (0:00:00.036)       0:02:34.011 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : For testing and debugging - images] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141
Monday 06 January 2025  13:45:03 -0500 (0:00:00.033)       0:02:34.045 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "podman",
        "images",
        "-n"
    ],
    "delta": "0:00:00.029629",
    "end": "2025-01-06 13:45:03.690119",
    "rc": 0,
    "start": "2025-01-06 13:45:03.660490"
}

STDOUT:

quay.io/linux-system-roles/mysql  5.6         dd3b2a5dcb48  3 years ago  308 MB

TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150
Monday 06 January 2025  13:45:03 -0500 (0:00:00.407)       0:02:34.453 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "podman",
        "volume",
        "ls",
        "-n"
    ],
    "delta": "0:00:00.028740",
    "end": "2025-01-06 13:45:04.106784",
    "rc": 0,
    "start": "2025-01-06 13:45:04.078044"
}

STDOUT:

local       systemd-quadlet-demo-mysql
local       wp-pv-claim
local       envoy-proxy-config
local       envoy-certificates

TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159
Monday 06 January 2025  13:45:04 -0500 (0:00:00.471)       0:02:34.924 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "podman",
        "ps",
        "--noheading"
    ],
    "delta": "0:00:00.031163",
    "end": "2025-01-06 13:45:04.616041",
    "rc": 0,
    "start": "2025-01-06 13:45:04.584878"
}

STDOUT:

a9d3cc1040b1  quay.io/linux-system-roles/mysql:5.6  mysqld      About a minute ago  Up About a minute (healthy)  3306/tcp    quadlet-demo-mysql

TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168
Monday 06 January 2025  13:45:04 -0500 (0:00:00.452)       0:02:35.377 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "podman",
        "network",
        "ls",
        "-n",
        "-q"
    ],
    "delta": "0:00:00.026040",
    "end": "2025-01-06 13:45:05.017345",
    "rc": 0,
    "start": "2025-01-06 13:45:04.991305"
}

STDOUT:

podman
systemd-quadlet-demo

TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177
Monday 06 January 2025  13:45:05 -0500 (0:00:00.477)       0:02:35.854 ******** 
ok: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187
Monday 06 January 2025  13:45:05 -0500 (0:00:00.445)       0:02:36.300 ******** 
ok: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : For testing and debugging - services] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
Monday 06 January 2025  13:45:06 -0500 (0:00:00.453)       0:02:36.753 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "services": {
            "NetworkManager-dispatcher.service": {
                "name": "NetworkManager-dispatcher.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "NetworkManager-wait-online.service": {
                "name": "NetworkManager-wait-online.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "NetworkManager.service": {
                "name": "NetworkManager.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "a9d3cc1040b1e5f0254254904e0fe071ddf92268fb2ab1167defc8fa41d200cf-7d2355e9d8516c6e.service": {
                "name": "a9d3cc1040b1e5f0254254904e0fe071ddf92268fb2ab1167defc8fa41d200cf-7d2355e9d8516c6e.service",
                "source": "systemd",
                "state": "stopped",
                "status": "failed"
            },
            "audit-rules.service": {
                "name": "audit-rules.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "auditd.service": {
                "name": "auditd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "auth-rpcgss-module.service": {
                "name": "auth-rpcgss-module.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "autofs.service": {
                "name": "autofs.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "autovt@.service": {
                "name": "autovt@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "alias"
            },
            "blk-availability.service": {
                "name": "blk-availability.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "capsule@.service": {
                "name": "capsule@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "certmonger.service": {
                "name": "certmonger.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "chrony-wait.service": {
                "name": "chrony-wait.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "chronyd-restricted.service": {
                "name": "chronyd-restricted.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "chronyd.service": {
                "name": "chronyd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "cloud-config.service": {
                "name": "cloud-config.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-final.service": {
                "name": "cloud-final.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-init-hotplugd.service": {
                "name": "cloud-init-hotplugd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "cloud-init-local.service": {
                "name": "cloud-init-local.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-init.service": {
                "name": "cloud-init.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "console-getty.service": {
                "name": "console-getty.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "container-getty@.service": {
                "name": "container-getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "crond.service": {
                "name": "crond.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "dbus-broker.service": {
                "name": "dbus-broker.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "dbus-org.fedoraproject.FirewallD1.service": {
                "name": "dbus-org.fedoraproject.FirewallD1.service",
                "source": "systemd",
                "state": "active",
                "status": "alias"
            },
            "dbus-org.freedesktop.hostname1.service": {
                "name": "dbus-org.freedesktop.hostname1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus-org.freedesktop.locale1.service": {
                "name": "dbus-org.freedesktop.locale1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus-org.freedesktop.login1.service": {
                "name": "dbus-org.freedesktop.login1.service",
                "source": "systemd",
                "state": "active",
                "status": "alias"
            },
            "dbus-org.freedesktop.nm-dispatcher.service": {
                "name": "dbus-org.freedesktop.nm-dispatcher.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus-org.freedesktop.timedate1.service": {
                "name": "dbus-org.freedesktop.timedate1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus.service": {
                "name": "dbus.service",
                "source": "systemd",
                "state": "active",
                "status": "alias"
            },
            "debug-shell.service": {
                "name": "debug-shell.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "dhcpcd.service": {
                "name": "dhcpcd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "dhcpcd@.service": {
                "name": "dhcpcd@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "disabled"
            },
            "display-manager.service": {
                "name": "display-manager.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "dm-event.service": {
                "name": "dm-event.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dnf-makecache.service": {
                "name": "dnf-makecache.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dnf-system-upgrade-cleanup.service": {
                "name": "dnf-system-upgrade-cleanup.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "dnf-system-upgrade.service": {
                "name": "dnf-system-upgrade.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "dracut-cmdline.service": {
                "name": "dracut-cmdline.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-initqueue.service": {
                "name": "dracut-initqueue.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-mount.service": {
                "name": "dracut-mount.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-mount.service": {
                "name": "dracut-pre-mount.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-pivot.service": {
                "name": "dracut-pre-pivot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-trigger.service": {
                "name": "dracut-pre-trigger.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-udev.service": {
                "name": "dracut-pre-udev.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-shutdown-onfailure.service": {
                "name": "dracut-shutdown-onfailure.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-shutdown.service": {
                "name": "dracut-shutdown.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "ebtables.service": {
                "name": "ebtables.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "emergency.service": {
                "name": "emergency.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "fips-crypto-policy-overlay.service": {
                "name": "fips-crypto-policy-overlay.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "firewalld.service": {
                "name": "firewalld.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "fsidd.service": {
                "name": "fsidd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "fstrim.service": {
                "name": "fstrim.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "getty@.service": {
                "name": "getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "enabled"
            },
            "getty@tty1.service": {
                "name": "getty@tty1.service",
                "source": "systemd",
                "state": "running",
                "status": "active"
            },
            "grub-boot-indeterminate.service": {
                "name": "grub-boot-indeterminate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "grub2-systemd-integration.service": {
                "name": "grub2-systemd-integration.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "gssproxy.service": {
                "name": "gssproxy.service",
                "source": "systemd",
                "state": "running",
                "status": "disabled"
            },
            "hv_kvp_daemon.service": {
                "name": "hv_kvp_daemon.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "initrd-cleanup.service": {
                "name": "initrd-cleanup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-parse-etc.service": {
                "name": "initrd-parse-etc.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-switch-root.service": {
                "name": "initrd-switch-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-udevadm-cleanup-db.service": {
                "name": "initrd-udevadm-cleanup-db.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "ip6tables.service": {
                "name": "ip6tables.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "ipset.service": {
                "name": "ipset.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "iptables.service": {
                "name": "iptables.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "irqbalance.service": {
                "name": "irqbalance.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "kdump.service": {
                "name": "kdump.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "kmod-static-nodes.service": {
                "name": "kmod-static-nodes.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "kvm_stat.service": {
                "name": "kvm_stat.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "ldconfig.service": {
                "name": "ldconfig.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "logrotate.service": {
                "name": "logrotate.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "lvm-devices-import.service": {
                "name": "lvm-devices-import.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "lvm2-lvmpolld.service": {
                "name": "lvm2-lvmpolld.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "lvm2-monitor.service": {
                "name": "lvm2-monitor.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "man-db-cache-update.service": {
                "name": "man-db-cache-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "man-db-restart-cache-update.service": {
                "name": "man-db-restart-cache-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "microcode.service": {
                "name": "microcode.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "modprobe@.service": {
                "name": "modprobe@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "modprobe@configfs.service": {
                "name": "modprobe@configfs.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@dm_mod.service": {
                "name": "modprobe@dm_mod.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@drm.service": {
                "name": "modprobe@drm.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@efi_pstore.service": {
                "name": "modprobe@efi_pstore.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@fuse.service": {
                "name": "modprobe@fuse.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@loop.service": {
                "name": "modprobe@loop.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "netavark-dhcp-proxy.service": {
                "name": "netavark-dhcp-proxy.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "netavark-firewalld-reload.service": {
                "name": "netavark-firewalld-reload.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "network.service": {
                "name": "network.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "nfs-blkmap.service": {
                "name": "nfs-blkmap.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nfs-idmapd.service": {
                "name": "nfs-idmapd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfs-mountd.service": {
                "name": "nfs-mountd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfs-server.service": {
                "name": "nfs-server.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "nfs-utils.service": {
                "name": "nfs-utils.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfsdcld.service": {
                "name": "nfsdcld.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nftables.service": {
                "name": "nftables.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nis-domainname.service": {
                "name": "nis-domainname.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nm-priv-helper.service": {
                "name": "nm-priv-helper.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "ntpd.service": {
                "name": "ntpd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "ntpdate.service": {
                "name": "ntpdate.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "pam_namespace.service": {
                "name": "pam_namespace.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "pcscd.service": {
                "name": "pcscd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "indirect"
            },
            "plymouth-quit-wait.service": {
                "name": "plymouth-quit-wait.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "plymouth-start.service": {
                "name": "plymouth-start.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "podman-auto-update.service": {
                "name": "podman-auto-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "podman-clean-transient.service": {
                "name": "podman-clean-transient.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "podman-kube@.service": {
                "name": "podman-kube@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "disabled"
            },
            "podman-restart.service": {
                "name": "podman-restart.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "podman.service": {
                "name": "podman.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "polkit.service": {
                "name": "polkit.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "qemu-guest-agent.service": {
                "name": "qemu-guest-agent.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "quadlet-demo-mysql-volume.service": {
                "name": "quadlet-demo-mysql-volume.service",
                "source": "systemd",
                "state": "stopped",
                "status": "generated"
            },
            "quadlet-demo-mysql.service": {
                "name": "quadlet-demo-mysql.service",
                "source": "systemd",
                "state": "running",
                "status": "generated"
            },
            "quadlet-demo-network.service": {
                "name": "quadlet-demo-network.service",
                "source": "systemd",
                "state": "stopped",
                "status": "generated"
            },
            "quotaon-root.service": {
                "name": "quotaon-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "quotaon@.service": {
                "name": "quotaon@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "rc-local.service": {
                "name": "rc-local.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rescue.service": {
                "name": "rescue.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "restraintd.service": {
                "name": "restraintd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rngd.service": {
                "name": "rngd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rpc-gssd.service": {
                "name": "rpc-gssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-statd-notify.service": {
                "name": "rpc-statd-notify.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-statd.service": {
                "name": "rpc-statd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-svcgssd.service": {
                "name": "rpc-svcgssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "rpcbind.service": {
                "name": "rpcbind.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rpmdb-migrate.service": {
                "name": "rpmdb-migrate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "rpmdb-rebuild.service": {
                "name": "rpmdb-rebuild.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "rsyslog.service": {
                "name": "rsyslog.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "selinux-autorelabel-mark.service": {
                "name": "selinux-autorelabel-mark.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "selinux-autorelabel.service": {
                "name": "selinux-autorelabel.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "selinux-check-proper-disable.service": {
                "name": "selinux-check-proper-disable.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "serial-getty@.service": {
                "name": "serial-getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "indirect"
            },
            "serial-getty@ttyS0.service": {
                "name": "serial-getty@ttyS0.service",
                "source": "systemd",
                "state": "running",
                "status": "active"
            },
            "sntp.service": {
                "name": "sntp.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "ssh-host-keys-migration.service": {
                "name": "ssh-host-keys-migration.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "sshd-keygen.service": {
                "name": "sshd-keygen.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "sshd-keygen@.service": {
                "name": "sshd-keygen@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "disabled"
            },
            "sshd-keygen@ecdsa.service": {
                "name": "sshd-keygen@ecdsa.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "sshd-keygen@ed25519.service": {
                "name": "sshd-keygen@ed25519.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "sshd-keygen@rsa.service": {
                "name": "sshd-keygen@rsa.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "sshd.service": {
                "name": "sshd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "sshd@.service": {
                "name": "sshd@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "sssd-autofs.service": {
                "name": "sssd-autofs.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-kcm.service": {
                "name": "sssd-kcm.service",
                "source": "systemd",
                "state": "stopped",
                "status": "indirect"
            },
            "sssd-nss.service": {
                "name": "sssd-nss.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-pac.service": {
                "name": "sssd-pac.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-pam.service": {
                "name": "sssd-pam.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-ssh.service": {
                "name": "sssd-ssh.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-sudo.service": {
                "name": "sssd-sudo.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd.service": {
                "name": "sssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "syslog.service": {
                "name": "syslog.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "system-update-cleanup.service": {
                "name": "system-update-cleanup.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-ask-password-console.service": {
                "name": "systemd-ask-password-console.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-ask-password-wall.service": {
                "name": "systemd-ask-password-wall.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-backlight@.service": {
                "name": "systemd-backlight@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-battery-check.service": {
                "name": "systemd-battery-check.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-binfmt.service": {
                "name": "systemd-binfmt.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-bless-boot.service": {
                "name": "systemd-bless-boot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-boot-check-no-failures.service": {
                "name": "systemd-boot-check-no-failures.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-boot-random-seed.service": {
                "name": "systemd-boot-random-seed.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-boot-update.service": {
                "name": "systemd-boot-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-bootctl@.service": {
                "name": "systemd-bootctl@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-confext.service": {
                "name": "systemd-confext.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-coredump@.service": {
                "name": "systemd-coredump@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-creds@.service": {
                "name": "systemd-creds@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-exit.service": {
                "name": "systemd-exit.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-firstboot.service": {
                "name": "systemd-firstboot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-fsck-root.service": {
                "name": "systemd-fsck-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-fsck@.service": {
                "name": "systemd-fsck@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-growfs-root.service": {
                "name": "systemd-growfs-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-growfs@.service": {
                "name": "systemd-growfs@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-halt.service": {
                "name": "systemd-halt.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-hibernate-clear.service": {
                "name": "systemd-hibernate-clear.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hibernate-resume.service": {
                "name": "systemd-hibernate-resume.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hibernate.service": {
                "name": "systemd-hibernate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-hostnamed.service": {
                "name": "systemd-hostnamed.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hwdb-update.service": {
                "name": "systemd-hwdb-update.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hybrid-sleep.service": {
                "name": "systemd-hybrid-sleep.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-initctl.service": {
                "name": "systemd-initctl.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journal-catalog-update.service": {
                "name": "systemd-journal-catalog-update.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journal-flush.service": {
                "name": "systemd-journal-flush.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journald-sync@.service": {
                "name": "systemd-journald-sync@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-journald.service": {
                "name": "systemd-journald.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-journald@.service": {
                "name": "systemd-journald@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-kexec.service": {
                "name": "systemd-kexec.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-localed.service": {
                "name": "systemd-localed.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-logind.service": {
                "name": "systemd-logind.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-machine-id-commit.service": {
                "name": "systemd-machine-id-commit.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-modules-load.service": {
                "name": "systemd-modules-load.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-network-generator.service": {
                "name": "systemd-network-generator.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-networkd-wait-online.service": {
                "name": "systemd-networkd-wait-online.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "systemd-oomd.service": {
                "name": "systemd-oomd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "systemd-pcrextend@.service": {
                "name": "systemd-pcrextend@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-pcrfs-root.service": {
                "name": "systemd-pcrfs-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-pcrfs@.service": {
                "name": "systemd-pcrfs@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-pcrlock-file-system.service": {
                "name": "systemd-pcrlock-file-system.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-firmware-code.service": {
                "name": "systemd-pcrlock-firmware-code.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-firmware-config.service": {
                "name": "systemd-pcrlock-firmware-config.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-machine-id.service": {
                "name": "systemd-pcrlock-machine-id.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-make-policy.service": {
                "name": "systemd-pcrlock-make-policy.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-secureboot-authority.service": {
                "name": "systemd-pcrlock-secureboot-authority.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-secureboot-policy.service": {
                "name": "systemd-pcrlock-secureboot-policy.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock@.service": {
                "name": "systemd-pcrlock@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-pcrmachine.service": {
                "name": "systemd-pcrmachine.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-pcrphase-initrd.service": {
                "name": "systemd-pcrphase-initrd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-pcrphase-sysinit.service": {
                "name": "systemd-pcrphase-sysinit.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-pcrphase.service": {
                "name": "systemd-pcrphase.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-poweroff.service": {
                "name": "systemd-poweroff.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-pstore.service": {
                "name": "systemd-pstore.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-quotacheck-root.service": {
                "name": "systemd-quotacheck-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-quotacheck@.service": {
                "name": "systemd-quotacheck@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-random-seed.service": {
                "name": "systemd-random-seed.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-reboot.service": {
                "name": "systemd-reboot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-remount-fs.service": {
                "name": "systemd-remount-fs.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled-runtime"
            },
            "systemd-repart.service": {
                "name": "systemd-repart.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-rfkill.service": {
                "name": "systemd-rfkill.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-soft-reboot.service": {
                "name": "systemd-soft-reboot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-suspend-then-hibernate.service": {
                "name": "systemd-suspend-then-hibernate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-suspend.service": {
                "name": "systemd-suspend.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-sysctl.service": {
                "name": "systemd-sysctl.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-sysext.service": {
                "name": "systemd-sysext.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-sysext@.service": {
                "name": "systemd-sysext@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-sysupdate-reboot.service": {
                "name": "systemd-sysupdate-reboot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "systemd-sysupdate.service": {
                "name": "systemd-sysupdate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "systemd-sysusers.service": {
                "name": "systemd-sysusers.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-timedated.service": {
                "name": "systemd-timedated.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-timesyncd.service": {
                "name": "systemd-timesyncd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "systemd-tmpfiles-clean.service": {
                "name": "systemd-tmpfiles-clean.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup-dev-early.service": {
                "name": "systemd-tmpfiles-setup-dev-early.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup-dev.service": {
                "name": "systemd-tmpfiles-setup-dev.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup.service": {
                "name": "systemd-tmpfiles-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tpm2-setup-early.service": {
                "name": "systemd-tpm2-setup-early.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tpm2-setup.service": {
                "name": "systemd-tpm2-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udev-load-credentials.service": {
                "name": "systemd-udev-load-credentials.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "systemd-udev-settle.service": {
                "name": "systemd-udev-settle.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udev-trigger.service": {
                "name": "systemd-udev-trigger.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udevd.service": {
                "name": "systemd-udevd.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-update-done.service": {
                "name": "systemd-update-done.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-update-utmp-runlevel.service": {
                "name": "systemd-update-utmp-runlevel.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-update-utmp.service": {
                "name": "systemd-update-utmp.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-user-sessions.service": {
                "name": "systemd-user-sessions.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-vconsole-setup.service": {
                "name": "systemd-vconsole-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-volatile-root.service": {
                "name": "systemd-volatile-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "user-runtime-dir@.service": {
                "name": "user-runtime-dir@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "user-runtime-dir@0.service": {
                "name": "user-runtime-dir@0.service",
                "source": "systemd",
                "state": "stopped",
                "status": "active"
            },
            "user@.service": {
                "name": "user@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "user@0.service": {
                "name": "user@0.service",
                "source": "systemd",
                "state": "running",
                "status": "active"
            },
            "ypbind.service": {
                "name": "ypbind.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            }
        }
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
Monday 06 January 2025  13:45:08 -0500 (0:00:02.342)       0:02:39.096 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_state != \"absent\"",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Monday 06 January 2025  13:45:08 -0500 (0:00:00.064)       0:02:39.161 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_file_src": "",
        "__podman_quadlet_spec": {},
        "__podman_quadlet_str": "---\napiVersion: v1\nkind: PersistentVolumeClaim\nmetadata:\n  name: wp-pv-claim\n  labels:\n    app: wordpress\nspec:\n  accessModes:\n  - ReadWriteOnce\n  resources:\n    requests:\n      storage: 20Gi\n---\napiVersion: v1\nkind: Pod\nmetadata:\n  name: quadlet-demo\nspec:\n  containers:\n  - name: wordpress\n    image: quay.io/linux-system-roles/wordpress:4.8-apache\n    env:\n    - name: WORDPRESS_DB_HOST\n      value: quadlet-demo-mysql\n    - name: WORDPRESS_DB_PASSWORD\n      valueFrom:\n        secretKeyRef:\n          name: mysql-root-password-kube\n          key: password\n    volumeMounts:\n    - name: wordpress-persistent-storage\n      mountPath: /var/www/html\n    resources:\n      requests:\n        memory: \"64Mi\"\n        cpu: \"250m\"\n      limits:\n        memory: \"128Mi\"\n        cpu: \"500m\"\n  - name: envoy\n    image: quay.io/linux-system-roles/envoyproxy:v1.25.0\n    volumeMounts:\n    - name: config-volume\n      mountPath: /etc/envoy\n    - name: certificates\n      mountPath: /etc/envoy-certificates\n    env:\n    - name: ENVOY_UID\n      value: \"0\"\n    resources:\n      requests:\n        memory: \"64Mi\"\n        cpu: \"250m\"\n      limits:\n        memory: \"128Mi\"\n        cpu: \"500m\"\n  volumes:\n  - name: config-volume\n    configMap:\n      name: envoy-proxy-config\n  - name: certificates\n    secret:\n      secretName: envoy-certificates\n  - name: wordpress-persistent-storage\n    persistentVolumeClaim:\n      claimName: wp-pv-claim\n  - name: www  # not used - for testing hostpath\n    hostPath:\n      path: /tmp/httpd3\n  - name: create  # not used - for testing hostpath\n    hostPath:\n      path: /tmp/httpd3-create\n",
        "__podman_quadlet_template_src": "quadlet-demo.yml.j2"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Monday 06 January 2025  13:45:08 -0500 (0:00:00.115)       0:02:39.276 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_continue_if_pull_fails": false,
        "__podman_pull_image": true,
        "__podman_state": "absent",
        "__podman_systemd_unit_scope": "",
        "__podman_user": "root"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Monday 06 January 2025  13:45:08 -0500 (0:00:00.044)       0:02:39.320 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_quadlet_str",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Monday 06 January 2025  13:45:08 -0500 (0:00:00.040)       0:02:39.361 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_name": "quadlet-demo",
        "__podman_quadlet_type": "yml",
        "__podman_rootless": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Monday 06 January 2025  13:45:08 -0500 (0:00:00.076)       0:02:39.437 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 06 January 2025  13:45:08 -0500 (0:00:00.140)       0:02:39.578 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 06 January 2025  13:45:08 -0500 (0:00:00.044)       0:02:39.622 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Debug] ********************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 06 January 2025  13:45:08 -0500 (0:00:00.043)       0:02:39.665 ******** 
ok: [managed-node3] => {}

MSG:

item {
    "state": "absent",
    "template_src": "quadlet-demo.yml.j2"
}

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:20
Monday 06 January 2025  13:45:09 -0500 (0:00:00.038)       0:02:39.704 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_group": "0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35
Monday 06 January 2025  13:45:09 -0500 (0:00:00.050)       0:02:39.755 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1736188868.5129364,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 32,
        "charset": "binary",
        "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97",
        "ctime": 1736188848.6497798,
        "dev": 51714,
        "device_type": 0,
        "executable": true,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 9125782,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "application/x-pie-executable",
        "mode": "0755",
        "mtime": 1730678400.0,
        "nlink": 1,
        "path": "/usr/bin/getsubids",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 15744,
        "uid": 0,
        "version": "4070602005",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": true,
        "xoth": true,
        "xusr": true
    }
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:46
Monday 06 January 2025  13:45:09 -0500 (0:00:00.382)       0:02:40.137 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:51
Monday 06 January 2025  13:45:09 -0500 (0:00:00.040)       0:02:40.178 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:56
Monday 06 January 2025  13:45:09 -0500 (0:00:00.064)       0:02:40.242 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:69
Monday 06 January 2025  13:45:09 -0500 (0:00:00.038)       0:02:40.280 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74
Monday 06 January 2025  13:45:09 -0500 (0:00:00.036)       0:02:40.317 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79
Monday 06 January 2025  13:45:09 -0500 (0:00:00.037)       0:02:40.354 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Monday 06 January 2025  13:45:09 -0500 (0:00:00.037)       0:02:40.391 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:96
Monday 06 January 2025  13:45:09 -0500 (0:00:00.035)       0:02:40.426 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Monday 06 January 2025  13:45:09 -0500 (0:00:00.040)       0:02:40.466 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_activate_systemd_unit": true,
        "__podman_images_found": [],
        "__podman_kube_yamls_raw": "",
        "__podman_service_name": "",
        "__podman_systemd_scope": "system",
        "__podman_user_home_dir": "/root",
        "__podman_xdg_runtime_dir": "/run/user/0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Monday 06 January 2025  13:45:09 -0500 (0:00:00.087)       0:02:40.554 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_path": "/etc/containers/systemd"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Monday 06 January 2025  13:45:09 -0500 (0:00:00.062)       0:02:40.616 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_state != \"absent\"",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Monday 06 January 2025  13:45:10 -0500 (0:00:00.133)       0:02:40.750 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_images": [],
        "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.yml",
        "__podman_volumes": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Monday 06 January 2025  13:45:10 -0500 (0:00:00.129)       0:02:40.880 ******** 
ok: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Monday 06 January 2025  13:45:10 -0500 (0:00:00.069)       0:02:40.950 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4
Monday 06 January 2025  13:45:10 -0500 (0:00:00.135)       0:02:41.085 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Stop and disable service] *************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
Monday 06 January 2025  13:45:10 -0500 (0:00:00.060)       0:02:41.146 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_service_name | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : See if quadlet file exists] ***********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33
Monday 06 January 2025  13:45:10 -0500 (0:00:00.066)       0:02:41.212 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1736189018.077357,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "998dccde0483b1654327a46ddd89cbaa47650370",
        "ctime": 1736189014.6323233,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 83886314,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1736189014.3123202,
        "nlink": 1,
        "path": "/etc/containers/systemd/quadlet-demo.yml",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1605,
        "uid": 0,
        "version": "278937917",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38
Monday 06 January 2025  13:45:10 -0500 (0:00:00.431)       0:02:41.644 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Slurp quadlet file] *******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6
Monday 06 January 2025  13:45:11 -0500 (0:00:00.095)       0:02:41.739 ******** 
ok: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12
Monday 06 January 2025  13:45:11 -0500 (0:00:00.388)       0:02:42.127 ******** 
skipping: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44
Monday 06 January 2025  13:45:11 -0500 (0:00:00.059)       0:02:42.186 ******** 
ok: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Reset raw variable] *******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52
Monday 06 January 2025  13:45:11 -0500 (0:00:00.091)       0:02:42.278 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_raw": null
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Remove quadlet file] ******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42
Monday 06 January 2025  13:45:11 -0500 (0:00:00.091)       0:02:42.369 ******** 
changed: [managed-node3] => {
    "changed": true,
    "path": "/etc/containers/systemd/quadlet-demo.yml",
    "state": "absent"
}

TASK [fedora.linux_system_roles.podman : Refresh systemd] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48
Monday 06 January 2025  13:45:12 -0500 (0:00:00.431)       0:02:42.801 ******** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.podman : Remove managed resource] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58
Monday 06 January 2025  13:45:12 -0500 (0:00:00.763)       0:02:43.565 ******** 
skipping: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Remove volumes] ***********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99
Monday 06 January 2025  13:45:13 -0500 (0:00:00.154)       0:02:43.719 ******** 
changed: [managed-node3] => (item=None) => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": true
}
changed: [managed-node3] => (item=None) => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": true
}
changed: [managed-node3] => (item=None) => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": true
}
changed: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": true
}

TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] *********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116
Monday 06 January 2025  13:45:14 -0500 (0:00:01.306)       0:02:45.025 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_parsed": null
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120
Monday 06 January 2025  13:45:14 -0500 (0:00:00.045)       0:02:45.070 ******** 
changed: [managed-node3] => {
    "changed": true,
    "cmd": [
        "podman",
        "image",
        "prune",
        "--all",
        "-f"
    ],
    "delta": "0:00:00.027303",
    "end": "2025-01-06 13:45:14.711311",
    "rc": 0,
    "start": "2025-01-06 13:45:14.684008"
}

TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131
Monday 06 January 2025  13:45:14 -0500 (0:00:00.406)       0:02:45.477 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Monday 06 January 2025  13:45:14 -0500 (0:00:00.066)       0:02:45.543 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Monday 06 January 2025  13:45:14 -0500 (0:00:00.057)       0:02:45.601 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Monday 06 January 2025  13:45:14 -0500 (0:00:00.071)       0:02:45.673 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : For testing and debugging - images] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141
Monday 06 January 2025  13:45:15 -0500 (0:00:00.042)       0:02:45.715 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "podman",
        "images",
        "-n"
    ],
    "delta": "0:00:00.028493",
    "end": "2025-01-06 13:45:15.366398",
    "rc": 0,
    "start": "2025-01-06 13:45:15.337905"
}

STDOUT:

quay.io/linux-system-roles/mysql  5.6         dd3b2a5dcb48  3 years ago  308 MB

TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150
Monday 06 January 2025  13:45:15 -0500 (0:00:00.426)       0:02:46.142 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "podman",
        "volume",
        "ls",
        "-n"
    ],
    "delta": "0:00:00.027467",
    "end": "2025-01-06 13:45:15.811825",
    "rc": 0,
    "start": "2025-01-06 13:45:15.784358"
}

STDOUT:

local       systemd-quadlet-demo-mysql

TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159
Monday 06 January 2025  13:45:15 -0500 (0:00:00.483)       0:02:46.626 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "podman",
        "ps",
        "--noheading"
    ],
    "delta": "0:00:00.030926",
    "end": "2025-01-06 13:45:16.362263",
    "rc": 0,
    "start": "2025-01-06 13:45:16.331337"
}

STDOUT:

a9d3cc1040b1  quay.io/linux-system-roles/mysql:5.6  mysqld      About a minute ago  Up About a minute (healthy)  3306/tcp    quadlet-demo-mysql

TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168
Monday 06 January 2025  13:45:16 -0500 (0:00:00.523)       0:02:47.150 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "podman",
        "network",
        "ls",
        "-n",
        "-q"
    ],
    "delta": "0:00:00.026434",
    "end": "2025-01-06 13:45:16.816014",
    "rc": 0,
    "start": "2025-01-06 13:45:16.789580"
}

STDOUT:

podman
systemd-quadlet-demo

TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177
Monday 06 January 2025  13:45:16 -0500 (0:00:00.481)       0:02:47.632 ******** 
ok: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187
Monday 06 January 2025  13:45:17 -0500 (0:00:00.518)       0:02:48.150 ******** 
ok: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : For testing and debugging - services] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
Monday 06 January 2025  13:45:17 -0500 (0:00:00.442)       0:02:48.592 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "services": {
            "NetworkManager-dispatcher.service": {
                "name": "NetworkManager-dispatcher.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "NetworkManager-wait-online.service": {
                "name": "NetworkManager-wait-online.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "NetworkManager.service": {
                "name": "NetworkManager.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "a9d3cc1040b1e5f0254254904e0fe071ddf92268fb2ab1167defc8fa41d200cf-7d2355e9d8516c6e.service": {
                "name": "a9d3cc1040b1e5f0254254904e0fe071ddf92268fb2ab1167defc8fa41d200cf-7d2355e9d8516c6e.service",
                "source": "systemd",
                "state": "stopped",
                "status": "failed"
            },
            "audit-rules.service": {
                "name": "audit-rules.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "auditd.service": {
                "name": "auditd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "auth-rpcgss-module.service": {
                "name": "auth-rpcgss-module.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "autofs.service": {
                "name": "autofs.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "autovt@.service": {
                "name": "autovt@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "alias"
            },
            "blk-availability.service": {
                "name": "blk-availability.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "capsule@.service": {
                "name": "capsule@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "certmonger.service": {
                "name": "certmonger.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "chrony-wait.service": {
                "name": "chrony-wait.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "chronyd-restricted.service": {
                "name": "chronyd-restricted.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "chronyd.service": {
                "name": "chronyd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "cloud-config.service": {
                "name": "cloud-config.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-final.service": {
                "name": "cloud-final.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-init-hotplugd.service": {
                "name": "cloud-init-hotplugd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "cloud-init-local.service": {
                "name": "cloud-init-local.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-init.service": {
                "name": "cloud-init.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "console-getty.service": {
                "name": "console-getty.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "container-getty@.service": {
                "name": "container-getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "crond.service": {
                "name": "crond.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "dbus-broker.service": {
                "name": "dbus-broker.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "dbus-org.fedoraproject.FirewallD1.service": {
                "name": "dbus-org.fedoraproject.FirewallD1.service",
                "source": "systemd",
                "state": "active",
                "status": "alias"
            },
            "dbus-org.freedesktop.hostname1.service": {
                "name": "dbus-org.freedesktop.hostname1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus-org.freedesktop.locale1.service": {
                "name": "dbus-org.freedesktop.locale1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus-org.freedesktop.login1.service": {
                "name": "dbus-org.freedesktop.login1.service",
                "source": "systemd",
                "state": "active",
                "status": "alias"
            },
            "dbus-org.freedesktop.nm-dispatcher.service": {
                "name": "dbus-org.freedesktop.nm-dispatcher.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus-org.freedesktop.timedate1.service": {
                "name": "dbus-org.freedesktop.timedate1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus.service": {
                "name": "dbus.service",
                "source": "systemd",
                "state": "active",
                "status": "alias"
            },
            "debug-shell.service": {
                "name": "debug-shell.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "dhcpcd.service": {
                "name": "dhcpcd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "dhcpcd@.service": {
                "name": "dhcpcd@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "disabled"
            },
            "display-manager.service": {
                "name": "display-manager.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "dm-event.service": {
                "name": "dm-event.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dnf-makecache.service": {
                "name": "dnf-makecache.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dnf-system-upgrade-cleanup.service": {
                "name": "dnf-system-upgrade-cleanup.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "dnf-system-upgrade.service": {
                "name": "dnf-system-upgrade.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "dracut-cmdline.service": {
                "name": "dracut-cmdline.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-initqueue.service": {
                "name": "dracut-initqueue.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-mount.service": {
                "name": "dracut-mount.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-mount.service": {
                "name": "dracut-pre-mount.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-pivot.service": {
                "name": "dracut-pre-pivot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-trigger.service": {
                "name": "dracut-pre-trigger.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-udev.service": {
                "name": "dracut-pre-udev.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-shutdown-onfailure.service": {
                "name": "dracut-shutdown-onfailure.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-shutdown.service": {
                "name": "dracut-shutdown.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "ebtables.service": {
                "name": "ebtables.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "emergency.service": {
                "name": "emergency.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "fips-crypto-policy-overlay.service": {
                "name": "fips-crypto-policy-overlay.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "firewalld.service": {
                "name": "firewalld.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "fsidd.service": {
                "name": "fsidd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "fstrim.service": {
                "name": "fstrim.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "getty@.service": {
                "name": "getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "enabled"
            },
            "getty@tty1.service": {
                "name": "getty@tty1.service",
                "source": "systemd",
                "state": "running",
                "status": "active"
            },
            "grub-boot-indeterminate.service": {
                "name": "grub-boot-indeterminate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "grub2-systemd-integration.service": {
                "name": "grub2-systemd-integration.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "gssproxy.service": {
                "name": "gssproxy.service",
                "source": "systemd",
                "state": "running",
                "status": "disabled"
            },
            "hv_kvp_daemon.service": {
                "name": "hv_kvp_daemon.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "initrd-cleanup.service": {
                "name": "initrd-cleanup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-parse-etc.service": {
                "name": "initrd-parse-etc.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-switch-root.service": {
                "name": "initrd-switch-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-udevadm-cleanup-db.service": {
                "name": "initrd-udevadm-cleanup-db.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "ip6tables.service": {
                "name": "ip6tables.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "ipset.service": {
                "name": "ipset.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "iptables.service": {
                "name": "iptables.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "irqbalance.service": {
                "name": "irqbalance.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "kdump.service": {
                "name": "kdump.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "kmod-static-nodes.service": {
                "name": "kmod-static-nodes.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "kvm_stat.service": {
                "name": "kvm_stat.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "ldconfig.service": {
                "name": "ldconfig.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "logrotate.service": {
                "name": "logrotate.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "lvm-devices-import.service": {
                "name": "lvm-devices-import.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "lvm2-lvmpolld.service": {
                "name": "lvm2-lvmpolld.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "lvm2-monitor.service": {
                "name": "lvm2-monitor.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "man-db-cache-update.service": {
                "name": "man-db-cache-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "man-db-restart-cache-update.service": {
                "name": "man-db-restart-cache-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "microcode.service": {
                "name": "microcode.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "modprobe@.service": {
                "name": "modprobe@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "modprobe@configfs.service": {
                "name": "modprobe@configfs.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@dm_mod.service": {
                "name": "modprobe@dm_mod.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@drm.service": {
                "name": "modprobe@drm.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@efi_pstore.service": {
                "name": "modprobe@efi_pstore.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@fuse.service": {
                "name": "modprobe@fuse.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@loop.service": {
                "name": "modprobe@loop.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "netavark-dhcp-proxy.service": {
                "name": "netavark-dhcp-proxy.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "netavark-firewalld-reload.service": {
                "name": "netavark-firewalld-reload.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "network.service": {
                "name": "network.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "nfs-blkmap.service": {
                "name": "nfs-blkmap.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nfs-idmapd.service": {
                "name": "nfs-idmapd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfs-mountd.service": {
                "name": "nfs-mountd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfs-server.service": {
                "name": "nfs-server.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "nfs-utils.service": {
                "name": "nfs-utils.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfsdcld.service": {
                "name": "nfsdcld.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nftables.service": {
                "name": "nftables.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nis-domainname.service": {
                "name": "nis-domainname.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nm-priv-helper.service": {
                "name": "nm-priv-helper.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "ntpd.service": {
                "name": "ntpd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "ntpdate.service": {
                "name": "ntpdate.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "pam_namespace.service": {
                "name": "pam_namespace.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "pcscd.service": {
                "name": "pcscd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "indirect"
            },
            "plymouth-quit-wait.service": {
                "name": "plymouth-quit-wait.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "plymouth-start.service": {
                "name": "plymouth-start.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "podman-auto-update.service": {
                "name": "podman-auto-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "podman-clean-transient.service": {
                "name": "podman-clean-transient.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "podman-kube@.service": {
                "name": "podman-kube@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "disabled"
            },
            "podman-restart.service": {
                "name": "podman-restart.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "podman.service": {
                "name": "podman.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "polkit.service": {
                "name": "polkit.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "qemu-guest-agent.service": {
                "name": "qemu-guest-agent.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "quadlet-demo-mysql-volume.service": {
                "name": "quadlet-demo-mysql-volume.service",
                "source": "systemd",
                "state": "stopped",
                "status": "generated"
            },
            "quadlet-demo-mysql.service": {
                "name": "quadlet-demo-mysql.service",
                "source": "systemd",
                "state": "running",
                "status": "generated"
            },
            "quadlet-demo-network.service": {
                "name": "quadlet-demo-network.service",
                "source": "systemd",
                "state": "stopped",
                "status": "generated"
            },
            "quotaon-root.service": {
                "name": "quotaon-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "quotaon@.service": {
                "name": "quotaon@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "rc-local.service": {
                "name": "rc-local.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rescue.service": {
                "name": "rescue.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "restraintd.service": {
                "name": "restraintd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rngd.service": {
                "name": "rngd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rpc-gssd.service": {
                "name": "rpc-gssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-statd-notify.service": {
                "name": "rpc-statd-notify.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-statd.service": {
                "name": "rpc-statd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-svcgssd.service": {
                "name": "rpc-svcgssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "rpcbind.service": {
                "name": "rpcbind.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rpmdb-migrate.service": {
                "name": "rpmdb-migrate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "rpmdb-rebuild.service": {
                "name": "rpmdb-rebuild.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "rsyslog.service": {
                "name": "rsyslog.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "selinux-autorelabel-mark.service": {
                "name": "selinux-autorelabel-mark.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "selinux-autorelabel.service": {
                "name": "selinux-autorelabel.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "selinux-check-proper-disable.service": {
                "name": "selinux-check-proper-disable.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "serial-getty@.service": {
                "name": "serial-getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "indirect"
            },
            "serial-getty@ttyS0.service": {
                "name": "serial-getty@ttyS0.service",
                "source": "systemd",
                "state": "running",
                "status": "active"
            },
            "sntp.service": {
                "name": "sntp.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "ssh-host-keys-migration.service": {
                "name": "ssh-host-keys-migration.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "sshd-keygen.service": {
                "name": "sshd-keygen.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "sshd-keygen@.service": {
                "name": "sshd-keygen@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "disabled"
            },
            "sshd-keygen@ecdsa.service": {
                "name": "sshd-keygen@ecdsa.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "sshd-keygen@ed25519.service": {
                "name": "sshd-keygen@ed25519.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "sshd-keygen@rsa.service": {
                "name": "sshd-keygen@rsa.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "sshd.service": {
                "name": "sshd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "sshd@.service": {
                "name": "sshd@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "sssd-autofs.service": {
                "name": "sssd-autofs.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-kcm.service": {
                "name": "sssd-kcm.service",
                "source": "systemd",
                "state": "stopped",
                "status": "indirect"
            },
            "sssd-nss.service": {
                "name": "sssd-nss.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-pac.service": {
                "name": "sssd-pac.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-pam.service": {
                "name": "sssd-pam.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-ssh.service": {
                "name": "sssd-ssh.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-sudo.service": {
                "name": "sssd-sudo.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd.service": {
                "name": "sssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "syslog.service": {
                "name": "syslog.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "system-update-cleanup.service": {
                "name": "system-update-cleanup.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-ask-password-console.service": {
                "name": "systemd-ask-password-console.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-ask-password-wall.service": {
                "name": "systemd-ask-password-wall.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-backlight@.service": {
                "name": "systemd-backlight@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-battery-check.service": {
                "name": "systemd-battery-check.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-binfmt.service": {
                "name": "systemd-binfmt.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-bless-boot.service": {
                "name": "systemd-bless-boot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-boot-check-no-failures.service": {
                "name": "systemd-boot-check-no-failures.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-boot-random-seed.service": {
                "name": "systemd-boot-random-seed.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-boot-update.service": {
                "name": "systemd-boot-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-bootctl@.service": {
                "name": "systemd-bootctl@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-confext.service": {
                "name": "systemd-confext.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-coredump@.service": {
                "name": "systemd-coredump@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-creds@.service": {
                "name": "systemd-creds@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-exit.service": {
                "name": "systemd-exit.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-firstboot.service": {
                "name": "systemd-firstboot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-fsck-root.service": {
                "name": "systemd-fsck-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-fsck@.service": {
                "name": "systemd-fsck@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-growfs-root.service": {
                "name": "systemd-growfs-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-growfs@.service": {
                "name": "systemd-growfs@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-halt.service": {
                "name": "systemd-halt.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-hibernate-clear.service": {
                "name": "systemd-hibernate-clear.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hibernate-resume.service": {
                "name": "systemd-hibernate-resume.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hibernate.service": {
                "name": "systemd-hibernate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-hostnamed.service": {
                "name": "systemd-hostnamed.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hwdb-update.service": {
                "name": "systemd-hwdb-update.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hybrid-sleep.service": {
                "name": "systemd-hybrid-sleep.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-initctl.service": {
                "name": "systemd-initctl.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journal-catalog-update.service": {
                "name": "systemd-journal-catalog-update.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journal-flush.service": {
                "name": "systemd-journal-flush.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journald-sync@.service": {
                "name": "systemd-journald-sync@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-journald.service": {
                "name": "systemd-journald.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-journald@.service": {
                "name": "systemd-journald@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-kexec.service": {
                "name": "systemd-kexec.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-localed.service": {
                "name": "systemd-localed.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-logind.service": {
                "name": "systemd-logind.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-machine-id-commit.service": {
                "name": "systemd-machine-id-commit.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-modules-load.service": {
                "name": "systemd-modules-load.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-network-generator.service": {
                "name": "systemd-network-generator.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-networkd-wait-online.service": {
                "name": "systemd-networkd-wait-online.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "systemd-oomd.service": {
                "name": "systemd-oomd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "systemd-pcrextend@.service": {
                "name": "systemd-pcrextend@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-pcrfs-root.service": {
                "name": "systemd-pcrfs-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-pcrfs@.service": {
                "name": "systemd-pcrfs@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-pcrlock-file-system.service": {
                "name": "systemd-pcrlock-file-system.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-firmware-code.service": {
                "name": "systemd-pcrlock-firmware-code.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-firmware-config.service": {
                "name": "systemd-pcrlock-firmware-config.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-machine-id.service": {
                "name": "systemd-pcrlock-machine-id.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-make-policy.service": {
                "name": "systemd-pcrlock-make-policy.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-secureboot-authority.service": {
                "name": "systemd-pcrlock-secureboot-authority.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-secureboot-policy.service": {
                "name": "systemd-pcrlock-secureboot-policy.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock@.service": {
                "name": "systemd-pcrlock@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-pcrmachine.service": {
                "name": "systemd-pcrmachine.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-pcrphase-initrd.service": {
                "name": "systemd-pcrphase-initrd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-pcrphase-sysinit.service": {
                "name": "systemd-pcrphase-sysinit.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-pcrphase.service": {
                "name": "systemd-pcrphase.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-poweroff.service": {
                "name": "systemd-poweroff.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-pstore.service": {
                "name": "systemd-pstore.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-quotacheck-root.service": {
                "name": "systemd-quotacheck-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-quotacheck@.service": {
                "name": "systemd-quotacheck@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-random-seed.service": {
                "name": "systemd-random-seed.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-reboot.service": {
                "name": "systemd-reboot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-remount-fs.service": {
                "name": "systemd-remount-fs.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled-runtime"
            },
            "systemd-repart.service": {
                "name": "systemd-repart.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-rfkill.service": {
                "name": "systemd-rfkill.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-soft-reboot.service": {
                "name": "systemd-soft-reboot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-suspend-then-hibernate.service": {
                "name": "systemd-suspend-then-hibernate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-suspend.service": {
                "name": "systemd-suspend.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-sysctl.service": {
                "name": "systemd-sysctl.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-sysext.service": {
                "name": "systemd-sysext.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-sysext@.service": {
                "name": "systemd-sysext@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-sysupdate-reboot.service": {
                "name": "systemd-sysupdate-reboot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "systemd-sysupdate.service": {
                "name": "systemd-sysupdate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "systemd-sysusers.service": {
                "name": "systemd-sysusers.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-timedated.service": {
                "name": "systemd-timedated.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-timesyncd.service": {
                "name": "systemd-timesyncd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "systemd-tmpfiles-clean.service": {
                "name": "systemd-tmpfiles-clean.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup-dev-early.service": {
                "name": "systemd-tmpfiles-setup-dev-early.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup-dev.service": {
                "name": "systemd-tmpfiles-setup-dev.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup.service": {
                "name": "systemd-tmpfiles-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tpm2-setup-early.service": {
                "name": "systemd-tpm2-setup-early.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tpm2-setup.service": {
                "name": "systemd-tpm2-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udev-load-credentials.service": {
                "name": "systemd-udev-load-credentials.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "systemd-udev-settle.service": {
                "name": "systemd-udev-settle.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udev-trigger.service": {
                "name": "systemd-udev-trigger.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udevd.service": {
                "name": "systemd-udevd.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-update-done.service": {
                "name": "systemd-update-done.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-update-utmp-runlevel.service": {
                "name": "systemd-update-utmp-runlevel.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-update-utmp.service": {
                "name": "systemd-update-utmp.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-user-sessions.service": {
                "name": "systemd-user-sessions.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-vconsole-setup.service": {
                "name": "systemd-vconsole-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-volatile-root.service": {
                "name": "systemd-volatile-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "user-runtime-dir@.service": {
                "name": "user-runtime-dir@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "user-runtime-dir@0.service": {
                "name": "user-runtime-dir@0.service",
                "source": "systemd",
                "state": "stopped",
                "status": "active"
            },
            "user@.service": {
                "name": "user@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "user@0.service": {
                "name": "user@0.service",
                "source": "systemd",
                "state": "running",
                "status": "active"
            },
            "ypbind.service": {
                "name": "ypbind.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            }
        }
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
Monday 06 January 2025  13:45:19 -0500 (0:00:01.962)       0:02:50.554 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_state != \"absent\"",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Monday 06 January 2025  13:45:20 -0500 (0:00:00.154)       0:02:50.709 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_file_src": "envoy-proxy-configmap.yml",
        "__podman_quadlet_spec": {},
        "__podman_quadlet_str": "---\napiVersion: v1\nkind: ConfigMap\nmetadata:\n  name: envoy-proxy-config\ndata:\n  envoy.yaml: |\n    admin:\n      address:\n        socket_address:\n          address: 0.0.0.0\n          port_value: 9901\n\n    static_resources:\n      listeners:\n      - name: listener_0\n        address:\n          socket_address:\n            address: 0.0.0.0\n            port_value: 8080\n        filter_chains:\n        - filters:\n          - name: envoy.filters.network.http_connection_manager\n            typed_config:\n              \"@type\": type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager\n              stat_prefix: ingress_http\n              codec_type: AUTO\n              route_config:\n                name: local_route\n                virtual_hosts:\n                - name: local_service\n                  domains: [\"*\"]\n                  routes:\n                  - match:\n                      prefix: \"/\"\n                    route:\n                      cluster: backend\n              http_filters:\n              - name: envoy.filters.http.router\n                typed_config:\n                  \"@type\": type.googleapis.com/envoy.extensions.filters.http.router.v3.Router\n          transport_socket:\n            name: envoy.transport_sockets.tls\n            typed_config:\n              \"@type\": type.googleapis.com/envoy.extensions.transport_sockets.tls.v3.DownstreamTlsContext\n              common_tls_context:\n                tls_certificates:\n                - certificate_chain:\n                    filename: /etc/envoy-certificates/certificate.pem\n                  private_key:\n                    filename: /etc/envoy-certificates/certificate.key\n      clusters:\n      - name: backend\n        connect_timeout: 5s\n        type: STATIC\n        dns_refresh_rate: 1800s\n        lb_policy: ROUND_ROBIN\n        load_assignment:\n          cluster_name: backend\n          endpoints:\n          - lb_endpoints:\n            - endpoint:\n                address:\n                  socket_address:\n                    address: 127.0.0.1\n                    port_value: 80",
        "__podman_quadlet_template_src": ""
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Monday 06 January 2025  13:45:20 -0500 (0:00:00.079)       0:02:50.788 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_continue_if_pull_fails": false,
        "__podman_pull_image": true,
        "__podman_state": "absent",
        "__podman_systemd_unit_scope": "",
        "__podman_user": "root"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Monday 06 January 2025  13:45:20 -0500 (0:00:00.076)       0:02:50.865 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_quadlet_file_src",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Monday 06 January 2025  13:45:20 -0500 (0:00:00.094)       0:02:50.959 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_name": "envoy-proxy-configmap",
        "__podman_quadlet_type": "yml",
        "__podman_rootless": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Monday 06 January 2025  13:45:20 -0500 (0:00:00.127)       0:02:51.087 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 06 January 2025  13:45:20 -0500 (0:00:00.131)       0:02:51.219 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 06 January 2025  13:45:20 -0500 (0:00:00.070)       0:02:51.289 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Debug] ********************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 06 January 2025  13:45:20 -0500 (0:00:00.060)       0:02:51.350 ******** 
ok: [managed-node3] => {}

MSG:

item {
    "file_src": "envoy-proxy-configmap.yml",
    "state": "absent"
}

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:20
Monday 06 January 2025  13:45:20 -0500 (0:00:00.064)       0:02:51.414 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_group": "0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35
Monday 06 January 2025  13:45:20 -0500 (0:00:00.110)       0:02:51.525 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1736188868.5129364,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 32,
        "charset": "binary",
        "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97",
        "ctime": 1736188848.6497798,
        "dev": 51714,
        "device_type": 0,
        "executable": true,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 9125782,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "application/x-pie-executable",
        "mode": "0755",
        "mtime": 1730678400.0,
        "nlink": 1,
        "path": "/usr/bin/getsubids",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 15744,
        "uid": 0,
        "version": "4070602005",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": true,
        "xoth": true,
        "xusr": true
    }
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:46
Monday 06 January 2025  13:45:21 -0500 (0:00:00.445)       0:02:51.971 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:51
Monday 06 January 2025  13:45:21 -0500 (0:00:00.090)       0:02:52.062 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:56
Monday 06 January 2025  13:45:21 -0500 (0:00:00.072)       0:02:52.134 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:69
Monday 06 January 2025  13:45:21 -0500 (0:00:00.061)       0:02:52.196 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74
Monday 06 January 2025  13:45:21 -0500 (0:00:00.060)       0:02:52.256 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79
Monday 06 January 2025  13:45:21 -0500 (0:00:00.063)       0:02:52.320 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Monday 06 January 2025  13:45:21 -0500 (0:00:00.238)       0:02:52.558 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:96
Monday 06 January 2025  13:45:21 -0500 (0:00:00.119)       0:02:52.677 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Monday 06 January 2025  13:45:22 -0500 (0:00:00.099)       0:02:52.777 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_activate_systemd_unit": true,
        "__podman_images_found": [],
        "__podman_kube_yamls_raw": "",
        "__podman_service_name": "",
        "__podman_systemd_scope": "system",
        "__podman_user_home_dir": "/root",
        "__podman_xdg_runtime_dir": "/run/user/0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Monday 06 January 2025  13:45:22 -0500 (0:00:00.107)       0:02:52.885 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_path": "/etc/containers/systemd"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Monday 06 January 2025  13:45:22 -0500 (0:00:00.072)       0:02:52.957 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_state != \"absent\"",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Monday 06 January 2025  13:45:22 -0500 (0:00:00.067)       0:02:53.025 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_images": [],
        "__podman_quadlet_file": "/etc/containers/systemd/envoy-proxy-configmap.yml",
        "__podman_volumes": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Monday 06 January 2025  13:45:22 -0500 (0:00:00.169)       0:02:53.195 ******** 
ok: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Monday 06 January 2025  13:45:22 -0500 (0:00:00.072)       0:02:53.267 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4
Monday 06 January 2025  13:45:22 -0500 (0:00:00.178)       0:02:53.445 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Stop and disable service] *************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
Monday 06 January 2025  13:45:22 -0500 (0:00:00.058)       0:02:53.504 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_service_name | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : See if quadlet file exists] ***********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33
Monday 06 January 2025  13:45:22 -0500 (0:00:00.065)       0:02:53.570 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1736189040.4735312,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "d681c7d56f912150d041873e880818b22a90c188",
        "ctime": 1736189009.883277,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 41943267,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1736189009.5392735,
        "nlink": 1,
        "path": "/etc/containers/systemd/envoy-proxy-configmap.yml",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 2102,
        "uid": 0,
        "version": "2640882634",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38
Monday 06 January 2025  13:45:23 -0500 (0:00:00.489)       0:02:54.059 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Slurp quadlet file] *******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6
Monday 06 January 2025  13:45:23 -0500 (0:00:00.146)       0:02:54.206 ******** 
ok: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12
Monday 06 January 2025  13:45:23 -0500 (0:00:00.467)       0:02:54.673 ******** 
skipping: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44
Monday 06 January 2025  13:45:24 -0500 (0:00:00.060)       0:02:54.734 ******** 
ok: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Reset raw variable] *******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52
Monday 06 January 2025  13:45:24 -0500 (0:00:00.137)       0:02:54.871 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_raw": null
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Remove quadlet file] ******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42
Monday 06 January 2025  13:45:24 -0500 (0:00:00.066)       0:02:54.938 ******** 
changed: [managed-node3] => {
    "changed": true,
    "path": "/etc/containers/systemd/envoy-proxy-configmap.yml",
    "state": "absent"
}

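(For reference: the removal recorded above is the result of an ordinary file-removal task; the path and state come straight from the result JSON, while the module and task name below are an illustrative sketch, not the role's actual source.)

    - name: Remove quadlet file (illustrative sketch)
      ansible.builtin.file:
        # path and state taken from the logged result above
        path: /etc/containers/systemd/envoy-proxy-configmap.yml
        state: absent
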
TASK [fedora.linux_system_roles.podman : Refresh systemd] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48
Monday 06 January 2025  13:45:24 -0500 (0:00:00.484)       0:02:55.422 ******** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

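(For reference: a result with "name": null and an empty "status" is what a pure daemon-reload call returns; the sketch below is illustrative and assumes the role uses the systemd module for this step, which the log does not show directly.)

    - name: Refresh systemd (illustrative sketch)
      ansible.builtin.systemd:
        # reload unit files so the removed quadlet is no longer generated
        daemon_reload: true
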
TASK [fedora.linux_system_roles.podman : Remove managed resource] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58
Monday 06 January 2025  13:45:25 -0500 (0:00:00.795)       0:02:56.217 ******** 
skipping: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Remove volumes] ***********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99
Monday 06 January 2025  13:45:25 -0500 (0:00:00.062)       0:02:56.280 ******** 
skipping: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] *********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116
Monday 06 January 2025  13:45:25 -0500 (0:00:00.154)       0:02:56.434 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_parsed": null
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120
Monday 06 January 2025  13:45:25 -0500 (0:00:00.069)       0:02:56.503 ******** 
changed: [managed-node3] => {
    "changed": true,
    "cmd": [
        "podman",
        "image",
        "prune",
        "--all",
        "-f"
    ],
    "delta": "0:00:00.029577",
    "end": "2025-01-06 13:45:26.185181",
    "rc": 0,
    "start": "2025-01-06 13:45:26.155604"
}

TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131
Monday 06 January 2025  13:45:26 -0500 (0:00:00.449)       0:02:56.953 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Monday 06 January 2025  13:45:26 -0500 (0:00:00.091)       0:02:57.045 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Monday 06 January 2025  13:45:26 -0500 (0:00:00.037)       0:02:57.083 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Monday 06 January 2025  13:45:26 -0500 (0:00:00.044)       0:02:57.127 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : For testing and debugging - images] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141
Monday 06 January 2025  13:45:26 -0500 (0:00:00.059)       0:02:57.186 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "podman",
        "images",
        "-n"
    ],
    "delta": "0:00:00.029526",
    "end": "2025-01-06 13:45:26.862305",
    "rc": 0,
    "start": "2025-01-06 13:45:26.832779"
}

STDOUT:

quay.io/linux-system-roles/mysql  5.6         dd3b2a5dcb48  3 years ago  308 MB

TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150
Monday 06 January 2025  13:45:26 -0500 (0:00:00.459)       0:02:57.646 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "podman",
        "volume",
        "ls",
        "-n"
    ],
    "delta": "0:00:00.026592",
    "end": "2025-01-06 13:45:27.326091",
    "rc": 0,
    "start": "2025-01-06 13:45:27.299499"
}

STDOUT:

local       systemd-quadlet-demo-mysql

TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159
Monday 06 January 2025  13:45:27 -0500 (0:00:00.465)       0:02:58.112 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "podman",
        "ps",
        "--noheading"
    ],
    "delta": "0:00:00.033224",
    "end": "2025-01-06 13:45:27.783896",
    "rc": 0,
    "start": "2025-01-06 13:45:27.750672"
}

STDOUT:

a9d3cc1040b1  quay.io/linux-system-roles/mysql:5.6  mysqld      2 minutes ago  Up 2 minutes (healthy)  3306/tcp    quadlet-demo-mysql

TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168
Monday 06 January 2025  13:45:27 -0500 (0:00:00.438)       0:02:58.550 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "podman",
        "network",
        "ls",
        "-n",
        "-q"
    ],
    "delta": "0:00:00.026732",
    "end": "2025-01-06 13:45:28.212486",
    "rc": 0,
    "start": "2025-01-06 13:45:28.185754"
}

STDOUT:

podman
systemd-quadlet-demo

TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177
Monday 06 January 2025  13:45:28 -0500 (0:00:00.429)       0:02:58.980 ******** 
ok: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187
Monday 06 January 2025  13:45:28 -0500 (0:00:00.601)       0:02:59.581 ******** 
ok: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : For testing and debugging - services] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
Monday 06 January 2025  13:45:29 -0500 (0:00:00.409)       0:02:59.991 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "services": {
            "NetworkManager-dispatcher.service": {
                "name": "NetworkManager-dispatcher.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "NetworkManager-wait-online.service": {
                "name": "NetworkManager-wait-online.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "NetworkManager.service": {
                "name": "NetworkManager.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "a9d3cc1040b1e5f0254254904e0fe071ddf92268fb2ab1167defc8fa41d200cf-7d2355e9d8516c6e.service": {
                "name": "a9d3cc1040b1e5f0254254904e0fe071ddf92268fb2ab1167defc8fa41d200cf-7d2355e9d8516c6e.service",
                "source": "systemd",
                "state": "stopped",
                "status": "failed"
            },
            "audit-rules.service": {
                "name": "audit-rules.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "auditd.service": {
                "name": "auditd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "auth-rpcgss-module.service": {
                "name": "auth-rpcgss-module.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "autofs.service": {
                "name": "autofs.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "autovt@.service": {
                "name": "autovt@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "alias"
            },
            "blk-availability.service": {
                "name": "blk-availability.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "capsule@.service": {
                "name": "capsule@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "certmonger.service": {
                "name": "certmonger.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "chrony-wait.service": {
                "name": "chrony-wait.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "chronyd-restricted.service": {
                "name": "chronyd-restricted.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "chronyd.service": {
                "name": "chronyd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "cloud-config.service": {
                "name": "cloud-config.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-final.service": {
                "name": "cloud-final.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-init-hotplugd.service": {
                "name": "cloud-init-hotplugd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "cloud-init-local.service": {
                "name": "cloud-init-local.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-init.service": {
                "name": "cloud-init.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "console-getty.service": {
                "name": "console-getty.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "container-getty@.service": {
                "name": "container-getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "crond.service": {
                "name": "crond.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "dbus-broker.service": {
                "name": "dbus-broker.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "dbus-org.fedoraproject.FirewallD1.service": {
                "name": "dbus-org.fedoraproject.FirewallD1.service",
                "source": "systemd",
                "state": "active",
                "status": "alias"
            },
            "dbus-org.freedesktop.hostname1.service": {
                "name": "dbus-org.freedesktop.hostname1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus-org.freedesktop.locale1.service": {
                "name": "dbus-org.freedesktop.locale1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus-org.freedesktop.login1.service": {
                "name": "dbus-org.freedesktop.login1.service",
                "source": "systemd",
                "state": "active",
                "status": "alias"
            },
            "dbus-org.freedesktop.nm-dispatcher.service": {
                "name": "dbus-org.freedesktop.nm-dispatcher.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus-org.freedesktop.timedate1.service": {
                "name": "dbus-org.freedesktop.timedate1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus.service": {
                "name": "dbus.service",
                "source": "systemd",
                "state": "active",
                "status": "alias"
            },
            "debug-shell.service": {
                "name": "debug-shell.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "dhcpcd.service": {
                "name": "dhcpcd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "dhcpcd@.service": {
                "name": "dhcpcd@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "disabled"
            },
            "display-manager.service": {
                "name": "display-manager.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "dm-event.service": {
                "name": "dm-event.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dnf-makecache.service": {
                "name": "dnf-makecache.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dnf-system-upgrade-cleanup.service": {
                "name": "dnf-system-upgrade-cleanup.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "dnf-system-upgrade.service": {
                "name": "dnf-system-upgrade.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "dracut-cmdline.service": {
                "name": "dracut-cmdline.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-initqueue.service": {
                "name": "dracut-initqueue.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-mount.service": {
                "name": "dracut-mount.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-mount.service": {
                "name": "dracut-pre-mount.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-pivot.service": {
                "name": "dracut-pre-pivot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-trigger.service": {
                "name": "dracut-pre-trigger.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-udev.service": {
                "name": "dracut-pre-udev.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-shutdown-onfailure.service": {
                "name": "dracut-shutdown-onfailure.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-shutdown.service": {
                "name": "dracut-shutdown.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "ebtables.service": {
                "name": "ebtables.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "emergency.service": {
                "name": "emergency.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "fips-crypto-policy-overlay.service": {
                "name": "fips-crypto-policy-overlay.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "firewalld.service": {
                "name": "firewalld.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "fsidd.service": {
                "name": "fsidd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "fstrim.service": {
                "name": "fstrim.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "getty@.service": {
                "name": "getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "enabled"
            },
            "getty@tty1.service": {
                "name": "getty@tty1.service",
                "source": "systemd",
                "state": "running",
                "status": "active"
            },
            "grub-boot-indeterminate.service": {
                "name": "grub-boot-indeterminate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "grub2-systemd-integration.service": {
                "name": "grub2-systemd-integration.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "gssproxy.service": {
                "name": "gssproxy.service",
                "source": "systemd",
                "state": "running",
                "status": "disabled"
            },
            "hv_kvp_daemon.service": {
                "name": "hv_kvp_daemon.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "initrd-cleanup.service": {
                "name": "initrd-cleanup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-parse-etc.service": {
                "name": "initrd-parse-etc.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-switch-root.service": {
                "name": "initrd-switch-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-udevadm-cleanup-db.service": {
                "name": "initrd-udevadm-cleanup-db.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "ip6tables.service": {
                "name": "ip6tables.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "ipset.service": {
                "name": "ipset.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "iptables.service": {
                "name": "iptables.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "irqbalance.service": {
                "name": "irqbalance.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "kdump.service": {
                "name": "kdump.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "kmod-static-nodes.service": {
                "name": "kmod-static-nodes.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "kvm_stat.service": {
                "name": "kvm_stat.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "ldconfig.service": {
                "name": "ldconfig.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "logrotate.service": {
                "name": "logrotate.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "lvm-devices-import.service": {
                "name": "lvm-devices-import.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "lvm2-lvmpolld.service": {
                "name": "lvm2-lvmpolld.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "lvm2-monitor.service": {
                "name": "lvm2-monitor.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "man-db-cache-update.service": {
                "name": "man-db-cache-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "man-db-restart-cache-update.service": {
                "name": "man-db-restart-cache-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "microcode.service": {
                "name": "microcode.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "modprobe@.service": {
                "name": "modprobe@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "modprobe@configfs.service": {
                "name": "modprobe@configfs.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@dm_mod.service": {
                "name": "modprobe@dm_mod.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@drm.service": {
                "name": "modprobe@drm.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@efi_pstore.service": {
                "name": "modprobe@efi_pstore.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@fuse.service": {
                "name": "modprobe@fuse.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@loop.service": {
                "name": "modprobe@loop.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "netavark-dhcp-proxy.service": {
                "name": "netavark-dhcp-proxy.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "netavark-firewalld-reload.service": {
                "name": "netavark-firewalld-reload.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "network.service": {
                "name": "network.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "nfs-blkmap.service": {
                "name": "nfs-blkmap.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nfs-idmapd.service": {
                "name": "nfs-idmapd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfs-mountd.service": {
                "name": "nfs-mountd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfs-server.service": {
                "name": "nfs-server.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "nfs-utils.service": {
                "name": "nfs-utils.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfsdcld.service": {
                "name": "nfsdcld.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nftables.service": {
                "name": "nftables.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nis-domainname.service": {
                "name": "nis-domainname.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nm-priv-helper.service": {
                "name": "nm-priv-helper.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "ntpd.service": {
                "name": "ntpd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "ntpdate.service": {
                "name": "ntpdate.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "pam_namespace.service": {
                "name": "pam_namespace.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "pcscd.service": {
                "name": "pcscd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "indirect"
            },
            "plymouth-quit-wait.service": {
                "name": "plymouth-quit-wait.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "plymouth-start.service": {
                "name": "plymouth-start.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "podman-auto-update.service": {
                "name": "podman-auto-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "podman-clean-transient.service": {
                "name": "podman-clean-transient.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "podman-kube@.service": {
                "name": "podman-kube@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "disabled"
            },
            "podman-restart.service": {
                "name": "podman-restart.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "podman.service": {
                "name": "podman.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "polkit.service": {
                "name": "polkit.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "qemu-guest-agent.service": {
                "name": "qemu-guest-agent.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "quadlet-demo-mysql-volume.service": {
                "name": "quadlet-demo-mysql-volume.service",
                "source": "systemd",
                "state": "stopped",
                "status": "generated"
            },
            "quadlet-demo-mysql.service": {
                "name": "quadlet-demo-mysql.service",
                "source": "systemd",
                "state": "running",
                "status": "generated"
            },
            "quadlet-demo-network.service": {
                "name": "quadlet-demo-network.service",
                "source": "systemd",
                "state": "stopped",
                "status": "generated"
            },
            "quotaon-root.service": {
                "name": "quotaon-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "quotaon@.service": {
                "name": "quotaon@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "rc-local.service": {
                "name": "rc-local.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rescue.service": {
                "name": "rescue.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "restraintd.service": {
                "name": "restraintd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rngd.service": {
                "name": "rngd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rpc-gssd.service": {
                "name": "rpc-gssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-statd-notify.service": {
                "name": "rpc-statd-notify.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-statd.service": {
                "name": "rpc-statd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-svcgssd.service": {
                "name": "rpc-svcgssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "rpcbind.service": {
                "name": "rpcbind.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rpmdb-migrate.service": {
                "name": "rpmdb-migrate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "rpmdb-rebuild.service": {
                "name": "rpmdb-rebuild.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "rsyslog.service": {
                "name": "rsyslog.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "selinux-autorelabel-mark.service": {
                "name": "selinux-autorelabel-mark.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "selinux-autorelabel.service": {
                "name": "selinux-autorelabel.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "selinux-check-proper-disable.service": {
                "name": "selinux-check-proper-disable.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "serial-getty@.service": {
                "name": "serial-getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "indirect"
            },
            "serial-getty@ttyS0.service": {
                "name": "serial-getty@ttyS0.service",
                "source": "systemd",
                "state": "running",
                "status": "active"
            },
            "sntp.service": {
                "name": "sntp.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "ssh-host-keys-migration.service": {
                "name": "ssh-host-keys-migration.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "sshd-keygen.service": {
                "name": "sshd-keygen.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "sshd-keygen@.service": {
                "name": "sshd-keygen@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "disabled"
            },
            "sshd-keygen@ecdsa.service": {
                "name": "sshd-keygen@ecdsa.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "sshd-keygen@ed25519.service": {
                "name": "sshd-keygen@ed25519.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "sshd-keygen@rsa.service": {
                "name": "sshd-keygen@rsa.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "sshd.service": {
                "name": "sshd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "sshd@.service": {
                "name": "sshd@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "sssd-autofs.service": {
                "name": "sssd-autofs.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-kcm.service": {
                "name": "sssd-kcm.service",
                "source": "systemd",
                "state": "stopped",
                "status": "indirect"
            },
            "sssd-nss.service": {
                "name": "sssd-nss.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-pac.service": {
                "name": "sssd-pac.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-pam.service": {
                "name": "sssd-pam.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-ssh.service": {
                "name": "sssd-ssh.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-sudo.service": {
                "name": "sssd-sudo.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd.service": {
                "name": "sssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "syslog.service": {
                "name": "syslog.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "system-update-cleanup.service": {
                "name": "system-update-cleanup.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-ask-password-console.service": {
                "name": "systemd-ask-password-console.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-ask-password-wall.service": {
                "name": "systemd-ask-password-wall.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-backlight@.service": {
                "name": "systemd-backlight@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-battery-check.service": {
                "name": "systemd-battery-check.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-binfmt.service": {
                "name": "systemd-binfmt.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-bless-boot.service": {
                "name": "systemd-bless-boot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-boot-check-no-failures.service": {
                "name": "systemd-boot-check-no-failures.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-boot-random-seed.service": {
                "name": "systemd-boot-random-seed.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-boot-update.service": {
                "name": "systemd-boot-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-bootctl@.service": {
                "name": "systemd-bootctl@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-confext.service": {
                "name": "systemd-confext.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-coredump@.service": {
                "name": "systemd-coredump@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-creds@.service": {
                "name": "systemd-creds@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-exit.service": {
                "name": "systemd-exit.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-firstboot.service": {
                "name": "systemd-firstboot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-fsck-root.service": {
                "name": "systemd-fsck-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-fsck@.service": {
                "name": "systemd-fsck@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-growfs-root.service": {
                "name": "systemd-growfs-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-growfs@.service": {
                "name": "systemd-growfs@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-halt.service": {
                "name": "systemd-halt.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-hibernate-clear.service": {
                "name": "systemd-hibernate-clear.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hibernate-resume.service": {
                "name": "systemd-hibernate-resume.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hibernate.service": {
                "name": "systemd-hibernate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-hostnamed.service": {
                "name": "systemd-hostnamed.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hwdb-update.service": {
                "name": "systemd-hwdb-update.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hybrid-sleep.service": {
                "name": "systemd-hybrid-sleep.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-initctl.service": {
                "name": "systemd-initctl.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journal-catalog-update.service": {
                "name": "systemd-journal-catalog-update.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journal-flush.service": {
                "name": "systemd-journal-flush.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journald-sync@.service": {
                "name": "systemd-journald-sync@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-journald.service": {
                "name": "systemd-journald.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-journald@.service": {
                "name": "systemd-journald@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-kexec.service": {
                "name": "systemd-kexec.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-localed.service": {
                "name": "systemd-localed.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-logind.service": {
                "name": "systemd-logind.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-machine-id-commit.service": {
                "name": "systemd-machine-id-commit.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-modules-load.service": {
                "name": "systemd-modules-load.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-network-generator.service": {
                "name": "systemd-network-generator.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-networkd-wait-online.service": {
                "name": "systemd-networkd-wait-online.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "systemd-oomd.service": {
                "name": "systemd-oomd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "systemd-pcrextend@.service": {
                "name": "systemd-pcrextend@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-pcrfs-root.service": {
                "name": "systemd-pcrfs-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-pcrfs@.service": {
                "name": "systemd-pcrfs@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-pcrlock-file-system.service": {
                "name": "systemd-pcrlock-file-system.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-firmware-code.service": {
                "name": "systemd-pcrlock-firmware-code.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-firmware-config.service": {
                "name": "systemd-pcrlock-firmware-config.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-machine-id.service": {
                "name": "systemd-pcrlock-machine-id.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-make-policy.service": {
                "name": "systemd-pcrlock-make-policy.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-secureboot-authority.service": {
                "name": "systemd-pcrlock-secureboot-authority.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-secureboot-policy.service": {
                "name": "systemd-pcrlock-secureboot-policy.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock@.service": {
                "name": "systemd-pcrlock@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-pcrmachine.service": {
                "name": "systemd-pcrmachine.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-pcrphase-initrd.service": {
                "name": "systemd-pcrphase-initrd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-pcrphase-sysinit.service": {
                "name": "systemd-pcrphase-sysinit.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-pcrphase.service": {
                "name": "systemd-pcrphase.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-poweroff.service": {
                "name": "systemd-poweroff.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-pstore.service": {
                "name": "systemd-pstore.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-quotacheck-root.service": {
                "name": "systemd-quotacheck-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-quotacheck@.service": {
                "name": "systemd-quotacheck@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-random-seed.service": {
                "name": "systemd-random-seed.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-reboot.service": {
                "name": "systemd-reboot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-remount-fs.service": {
                "name": "systemd-remount-fs.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled-runtime"
            },
            "systemd-repart.service": {
                "name": "systemd-repart.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-rfkill.service": {
                "name": "systemd-rfkill.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-soft-reboot.service": {
                "name": "systemd-soft-reboot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-suspend-then-hibernate.service": {
                "name": "systemd-suspend-then-hibernate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-suspend.service": {
                "name": "systemd-suspend.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-sysctl.service": {
                "name": "systemd-sysctl.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-sysext.service": {
                "name": "systemd-sysext.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-sysext@.service": {
                "name": "systemd-sysext@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-sysupdate-reboot.service": {
                "name": "systemd-sysupdate-reboot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "systemd-sysupdate.service": {
                "name": "systemd-sysupdate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "systemd-sysusers.service": {
                "name": "systemd-sysusers.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-timedated.service": {
                "name": "systemd-timedated.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-timesyncd.service": {
                "name": "systemd-timesyncd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "systemd-tmpfiles-clean.service": {
                "name": "systemd-tmpfiles-clean.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup-dev-early.service": {
                "name": "systemd-tmpfiles-setup-dev-early.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup-dev.service": {
                "name": "systemd-tmpfiles-setup-dev.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup.service": {
                "name": "systemd-tmpfiles-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tpm2-setup-early.service": {
                "name": "systemd-tpm2-setup-early.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tpm2-setup.service": {
                "name": "systemd-tpm2-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udev-load-credentials.service": {
                "name": "systemd-udev-load-credentials.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "systemd-udev-settle.service": {
                "name": "systemd-udev-settle.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udev-trigger.service": {
                "name": "systemd-udev-trigger.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udevd.service": {
                "name": "systemd-udevd.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-update-done.service": {
                "name": "systemd-update-done.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-update-utmp-runlevel.service": {
                "name": "systemd-update-utmp-runlevel.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-update-utmp.service": {
                "name": "systemd-update-utmp.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-user-sessions.service": {
                "name": "systemd-user-sessions.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-vconsole-setup.service": {
                "name": "systemd-vconsole-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-volatile-root.service": {
                "name": "systemd-volatile-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "user-runtime-dir@.service": {
                "name": "user-runtime-dir@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "user-runtime-dir@0.service": {
                "name": "user-runtime-dir@0.service",
                "source": "systemd",
                "state": "stopped",
                "status": "active"
            },
            "user@.service": {
                "name": "user@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "user@0.service": {
                "name": "user@0.service",
                "source": "systemd",
                "state": "running",
                "status": "active"
            },
            "ypbind.service": {
                "name": "ypbind.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            }
        }
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
Monday 06 January 2025  13:45:31 -0500 (0:00:01.861)       0:03:01.852 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_state != \"absent\"",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Monday 06 January 2025  13:45:31 -0500 (0:00:00.037)       0:03:01.890 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_file_src": "",
        "__podman_quadlet_spec": {},
        "__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Container]\nImage=quay.io/linux-system-roles/mysql:5.6\nContainerName=quadlet-demo-mysql\nVolume=quadlet-demo-mysql.volume:/var/lib/mysql\nVolume=/tmp/quadlet_demo:/var/lib/quadlet_demo:Z\nNetwork=quadlet-demo.network\nSecret=mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD\nHealthCmd=/bin/true\nHealthOnFailure=kill\n",
        "__podman_quadlet_template_src": "quadlet-demo-mysql.container.j2"
    },
    "changed": false
}
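
For readability, the __podman_quadlet_str fact above corresponds to the following quadlet unit as rendered from quadlet-demo-mysql.container.j2 (this is only the JSON string unescaped; it adds nothing beyond what the log already shows):

    [Install]
    WantedBy=default.target

    [Container]
    Image=quay.io/linux-system-roles/mysql:5.6
    ContainerName=quadlet-demo-mysql
    Volume=quadlet-demo-mysql.volume:/var/lib/mysql
    Volume=/tmp/quadlet_demo:/var/lib/quadlet_demo:Z
    Network=quadlet-demo.network
    Secret=mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD
    HealthCmd=/bin/true
    HealthOnFailure=kill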

TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Monday 06 January 2025  13:45:31 -0500 (0:00:00.170)       0:03:02.061 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_continue_if_pull_fails": false,
        "__podman_pull_image": true,
        "__podman_state": "absent",
        "__podman_systemd_unit_scope": "",
        "__podman_user": "root"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Monday 06 January 2025  13:45:31 -0500 (0:00:00.063)       0:03:02.124 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_quadlet_str",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Monday 06 January 2025  13:45:31 -0500 (0:00:00.043)       0:03:02.168 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_name": "quadlet-demo-mysql",
        "__podman_quadlet_type": "container",
        "__podman_rootless": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Monday 06 January 2025  13:45:31 -0500 (0:00:00.064)       0:03:02.232 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 06 January 2025  13:45:31 -0500 (0:00:00.067)       0:03:02.300 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 06 January 2025  13:45:31 -0500 (0:00:00.043)       0:03:02.344 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Debug] ********************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 06 January 2025  13:45:31 -0500 (0:00:00.060)       0:03:02.404 ******** 
ok: [managed-node3] => {}

MSG:

item {
    "state": "absent",
    "template_src": "quadlet-demo-mysql.container.j2"
}
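
The item printed above is one entry of the role's podman_quadlet_specs input. A minimal sketch of a spec entry that would produce this item, assuming the role's documented variable layout rather than anything taken from this log, might look like:

    podman_quadlet_specs:
      - template_src: quadlet-demo-mysql.container.j2   # rendered into quadlet-demo-mysql.container
        state: absent                                   # this run is cleaning up, not deploying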

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:20
Monday 06 January 2025  13:45:31 -0500 (0:00:00.073)       0:03:02.478 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_group": "0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35
Monday 06 January 2025  13:45:31 -0500 (0:00:00.169)       0:03:02.647 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1736188868.5129364,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 32,
        "charset": "binary",
        "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97",
        "ctime": 1736188848.6497798,
        "dev": 51714,
        "device_type": 0,
        "executable": true,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 9125782,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "application/x-pie-executable",
        "mode": "0755",
        "mtime": 1730678400.0,
        "nlink": 1,
        "path": "/usr/bin/getsubids",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 15744,
        "uid": 0,
        "version": "4070602005",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": true,
        "xoth": true,
        "xusr": true
    }
}
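
The subuid/subgid checks that follow are skipped because __podman_user is root. For a rootless user the role would query the account with getsubids; a minimal ad-hoc sketch of that check, with a hypothetical user name, would be:

    - name: Check subuid ranges for a rootless user (sketch)
      ansible.builtin.command: getsubids someuser       # "someuser" is hypothetical
      changed_when: false

    - name: Check subgid ranges for the same user (sketch)
      ansible.builtin.command: getsubids -g someuser
      changed_when: false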

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:46
Monday 06 January 2025  13:45:32 -0500 (0:00:00.560)       0:03:03.207 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:51
Monday 06 January 2025  13:45:32 -0500 (0:00:00.092)       0:03:03.300 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:56
Monday 06 January 2025  13:45:32 -0500 (0:00:00.190)       0:03:03.491 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:69
Monday 06 January 2025  13:45:32 -0500 (0:00:00.118)       0:03:03.609 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74
Monday 06 January 2025  13:45:33 -0500 (0:00:00.099)       0:03:03.708 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79
Monday 06 January 2025  13:45:33 -0500 (0:00:00.099)       0:03:03.808 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Monday 06 January 2025  13:45:33 -0500 (0:00:00.069)       0:03:03.877 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:96
Monday 06 January 2025  13:45:33 -0500 (0:00:00.072)       0:03:03.950 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Monday 06 January 2025  13:45:33 -0500 (0:00:00.061)       0:03:04.012 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_activate_systemd_unit": true,
        "__podman_images_found": [
            "quay.io/linux-system-roles/mysql:5.6"
        ],
        "__podman_kube_yamls_raw": "",
        "__podman_service_name": "quadlet-demo-mysql.service",
        "__podman_systemd_scope": "system",
        "__podman_user_home_dir": "/root",
        "__podman_xdg_runtime_dir": "/run/user/0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Monday 06 January 2025  13:45:33 -0500 (0:00:00.097)       0:03:04.110 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_path": "/etc/containers/systemd"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Monday 06 January 2025  13:45:33 -0500 (0:00:00.089)       0:03:04.199 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_state != \"absent\"",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Monday 06 January 2025  13:45:33 -0500 (0:00:00.098)       0:03:04.298 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_images": [
            "quay.io/linux-system-roles/mysql:5.6"
        ],
        "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.container",
        "__podman_volumes": [
            "/tmp/quadlet_demo"
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Monday 06 January 2025  13:45:33 -0500 (0:00:00.150)       0:03:04.449 ******** 
ok: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Monday 06 January 2025  13:45:33 -0500 (0:00:00.061)       0:03:04.510 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4
Monday 06 January 2025  13:45:33 -0500 (0:00:00.098)       0:03:04.609 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Stop and disable service] *************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
Monday 06 January 2025  13:45:33 -0500 (0:00:00.039)       0:03:04.649 ******** 
changed: [managed-node3] => {
    "changed": true,
    "enabled": false,
    "failed_when_result": false,
    "name": "quadlet-demo-mysql.service",
    "state": "stopped",
    "status": {
        "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0",
        "ActiveEnterTimestamp": "Mon 2025-01-06 13:43:26 EST",
        "ActiveEnterTimestampMonotonic": "447519288",
        "ActiveExitTimestampMonotonic": "0",
        "ActiveState": "active",
        "After": "system.slice systemd-journald.socket network-online.target tmp.mount -.mount quadlet-demo-mysql-volume.service sysinit.target basic.target quadlet-demo-network.service",
        "AllowIsolate": "no",
        "AssertResult": "yes",
        "AssertTimestamp": "Mon 2025-01-06 13:43:25 EST",
        "AssertTimestampMonotonic": "447174824",
        "Before": "multi-user.target shutdown.target",
        "BlockIOAccounting": "no",
        "BlockIOWeight": "[not set]",
        "CPUAccounting": "yes",
        "CPUAffinityFromNUMA": "no",
        "CPUQuotaPerSecUSec": "infinity",
        "CPUQuotaPeriodUSec": "infinity",
        "CPUSchedulingPolicy": "0",
        "CPUSchedulingPriority": "0",
        "CPUSchedulingResetOnFork": "no",
        "CPUShares": "[not set]",
        "CPUUsageNSec": "2878275000",
        "CPUWeight": "[not set]",
        "CacheDirectoryMode": "0755",
        "CanFreeze": "yes",
        "CanIsolate": "no",
        "CanReload": "no",
        "CanStart": "yes",
        "CanStop": "yes",
        "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
        "CleanResult": "success",
        "CollectMode": "inactive",
        "ConditionResult": "yes",
        "ConditionTimestamp": "Mon 2025-01-06 13:43:25 EST",
        "ConditionTimestampMonotonic": "447174821",
        "ConfigurationDirectoryMode": "0755",
        "Conflicts": "shutdown.target",
        "ControlGroup": "/system.slice/quadlet-demo-mysql.service",
        "ControlGroupId": "5688",
        "ControlPID": "0",
        "CoredumpFilter": "0x33",
        "CoredumpReceive": "no",
        "DefaultDependencies": "yes",
        "DefaultMemoryLow": "0",
        "DefaultMemoryMin": "0",
        "DefaultStartupMemoryLow": "0",
        "Delegate": "yes",
        "DelegateControllers": "cpu cpuset io memory pids",
        "Description": "quadlet-demo-mysql.service",
        "DevicePolicy": "auto",
        "DynamicUser": "no",
        "EffectiveCPUs": "0-1",
        "EffectiveMemoryHigh": "3698229248",
        "EffectiveMemoryMax": "3698229248",
        "EffectiveMemoryNodes": "0",
        "EffectiveTasksMax": "22349",
        "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service",
        "ExecMainCode": "0",
        "ExecMainExitTimestampMonotonic": "0",
        "ExecMainHandoffTimestampMonotonic": "0",
        "ExecMainPID": "31613",
        "ExecMainStartTimestamp": "Mon 2025-01-06 13:43:26 EST",
        "ExecMainStartTimestampMonotonic": "447519242",
        "ExecMainStatus": "0",
        "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-demo-mysql --cidfile=/run/quadlet-demo-mysql.cid --replace --rm --cgroups=split --network systemd-quadlet-demo --sdnotify=conmon -d -v systemd-quadlet-demo-mysql:/var/lib/mysql -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z --secret mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD --health-cmd /bin/true --health-on-failure kill quay.io/linux-system-roles/mysql:5.6 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-demo-mysql --cidfile=/run/quadlet-demo-mysql.cid --replace --rm --cgroups=split --network systemd-quadlet-demo --sdnotify=conmon -d -v systemd-quadlet-demo-mysql:/var/lib/mysql -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z --secret mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD --health-cmd /bin/true --health-on-failure kill quay.io/linux-system-roles/mysql:5.6 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; flags=ignore-failure ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExitType": "main",
        "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "FailureAction": "none",
        "FileDescriptorStoreMax": "0",
        "FileDescriptorStorePreserve": "restart",
        "FinalKillSignal": "9",
        "FragmentPath": "/run/systemd/generator/quadlet-demo-mysql.service",
        "FreezerState": "running",
        "GID": "[not set]",
        "GuessMainPID": "yes",
        "IOAccounting": "no",
        "IOReadBytes": "[not set]",
        "IOReadOperations": "[not set]",
        "IOSchedulingClass": "2",
        "IOSchedulingPriority": "4",
        "IOWeight": "[not set]",
        "IOWriteBytes": "[not set]",
        "IOWriteOperations": "[not set]",
        "IPAccounting": "no",
        "IPEgressBytes": "[no data]",
        "IPEgressPackets": "[no data]",
        "IPIngressBytes": "[no data]",
        "IPIngressPackets": "[no data]",
        "Id": "quadlet-demo-mysql.service",
        "IgnoreOnIsolate": "no",
        "IgnoreSIGPIPE": "yes",
        "InactiveEnterTimestampMonotonic": "0",
        "InactiveExitTimestamp": "Mon 2025-01-06 13:43:25 EST",
        "InactiveExitTimestampMonotonic": "447182884",
        "InvocationID": "b85fd1c883d94cef899ec57349e12b76",
        "JobRunningTimeoutUSec": "infinity",
        "JobTimeoutAction": "none",
        "JobTimeoutUSec": "infinity",
        "KeyringMode": "private",
        "KillMode": "mixed",
        "KillSignal": "15",
        "LimitAS": "infinity",
        "LimitASSoft": "infinity",
        "LimitCORE": "infinity",
        "LimitCORESoft": "infinity",
        "LimitCPU": "infinity",
        "LimitCPUSoft": "infinity",
        "LimitDATA": "infinity",
        "LimitDATASoft": "infinity",
        "LimitFSIZE": "infinity",
        "LimitFSIZESoft": "infinity",
        "LimitLOCKS": "infinity",
        "LimitLOCKSSoft": "infinity",
        "LimitMEMLOCK": "8388608",
        "LimitMEMLOCKSoft": "8388608",
        "LimitMSGQUEUE": "819200",
        "LimitMSGQUEUESoft": "819200",
        "LimitNICE": "0",
        "LimitNICESoft": "0",
        "LimitNOFILE": "524288",
        "LimitNOFILESoft": "1024",
        "LimitNPROC": "13968",
        "LimitNPROCSoft": "13968",
        "LimitRSS": "infinity",
        "LimitRSSSoft": "infinity",
        "LimitRTPRIO": "0",
        "LimitRTPRIOSoft": "0",
        "LimitRTTIME": "infinity",
        "LimitRTTIMESoft": "infinity",
        "LimitSIGPENDING": "13968",
        "LimitSIGPENDINGSoft": "13968",
        "LimitSTACK": "infinity",
        "LimitSTACKSoft": "8388608",
        "LoadState": "loaded",
        "LockPersonality": "no",
        "LogLevelMax": "-1",
        "LogRateLimitBurst": "0",
        "LogRateLimitIntervalUSec": "0",
        "LogsDirectoryMode": "0755",
        "MainPID": "31613",
        "ManagedOOMMemoryPressure": "auto",
        "ManagedOOMMemoryPressureLimit": "0",
        "ManagedOOMPreference": "none",
        "ManagedOOMSwap": "auto",
        "MemoryAccounting": "yes",
        "MemoryAvailable": "2683764736",
        "MemoryCurrent": "604426240",
        "MemoryDenyWriteExecute": "no",
        "MemoryHigh": "infinity",
        "MemoryKSM": "no",
        "MemoryLimit": "infinity",
        "MemoryLow": "0",
        "MemoryMax": "infinity",
        "MemoryMin": "0",
        "MemoryPeak": "643510272",
        "MemoryPressureThresholdUSec": "200ms",
        "MemoryPressureWatch": "auto",
        "MemorySwapCurrent": "0",
        "MemorySwapMax": "infinity",
        "MemorySwapPeak": "0",
        "MemoryZSwapCurrent": "0",
        "MemoryZSwapMax": "infinity",
        "MemoryZSwapWriteback": "yes",
        "MountAPIVFS": "no",
        "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "NFileDescriptorStore": "0",
        "NRestarts": "0",
        "NUMAPolicy": "n/a",
        "Names": "quadlet-demo-mysql.service",
        "NeedDaemonReload": "no",
        "Nice": "0",
        "NoNewPrivileges": "no",
        "NonBlocking": "no",
        "NotifyAccess": "all",
        "OOMPolicy": "continue",
        "OOMScoreAdjust": "0",
        "OnFailureJobMode": "replace",
        "OnSuccessJobMode": "fail",
        "Perpetual": "no",
        "PrivateDevices": "no",
        "PrivateIPC": "no",
        "PrivateMounts": "no",
        "PrivateNetwork": "no",
        "PrivateTmp": "no",
        "PrivateUsers": "no",
        "ProcSubset": "all",
        "ProtectClock": "no",
        "ProtectControlGroups": "no",
        "ProtectHome": "no",
        "ProtectHostname": "no",
        "ProtectKernelLogs": "no",
        "ProtectKernelModules": "no",
        "ProtectKernelTunables": "no",
        "ProtectProc": "default",
        "ProtectSystem": "no",
        "RefuseManualStart": "no",
        "RefuseManualStop": "no",
        "ReloadResult": "success",
        "ReloadSignal": "1",
        "RemainAfterExit": "no",
        "RemoveIPC": "no",
        "Requires": "quadlet-demo-network.service system.slice sysinit.target -.mount quadlet-demo-mysql-volume.service",
        "RequiresMountsFor": "/run/containers /tmp/quadlet_demo",
        "Restart": "no",
        "RestartKillSignal": "15",
        "RestartMaxDelayUSec": "infinity",
        "RestartMode": "normal",
        "RestartSteps": "0",
        "RestartUSec": "100ms",
        "RestartUSecNext": "100ms",
        "RestrictNamespaces": "no",
        "RestrictRealtime": "no",
        "RestrictSUIDSGID": "no",
        "Result": "success",
        "RootDirectoryStartOnly": "no",
        "RootEphemeral": "no",
        "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "RuntimeDirectoryMode": "0755",
        "RuntimeDirectoryPreserve": "no",
        "RuntimeMaxUSec": "infinity",
        "RuntimeRandomizedExtraUSec": "0",
        "SameProcessGroup": "no",
        "SecureBits": "0",
        "SendSIGHUP": "no",
        "SendSIGKILL": "yes",
        "SetLoginEnvironment": "no",
        "Slice": "system.slice",
        "SourcePath": "/etc/containers/systemd/quadlet-demo-mysql.container",
        "StandardError": "inherit",
        "StandardInput": "null",
        "StandardOutput": "journal",
        "StartLimitAction": "none",
        "StartLimitBurst": "5",
        "StartLimitIntervalUSec": "10s",
        "StartupBlockIOWeight": "[not set]",
        "StartupCPUShares": "[not set]",
        "StartupCPUWeight": "[not set]",
        "StartupIOWeight": "[not set]",
        "StartupMemoryHigh": "infinity",
        "StartupMemoryLow": "0",
        "StartupMemoryMax": "infinity",
        "StartupMemorySwapMax": "infinity",
        "StartupMemoryZSwapMax": "infinity",
        "StateChangeTimestamp": "Mon 2025-01-06 13:43:26 EST",
        "StateChangeTimestampMonotonic": "447519288",
        "StateDirectoryMode": "0755",
        "StatusErrno": "0",
        "StopWhenUnneeded": "no",
        "SubState": "running",
        "SuccessAction": "none",
        "SurviveFinalKillSignal": "no",
        "SyslogFacility": "3",
        "SyslogIdentifier": "quadlet-demo-mysql",
        "SyslogLevel": "6",
        "SyslogLevelPrefix": "yes",
        "SyslogPriority": "30",
        "SystemCallErrorNumber": "2147483646",
        "TTYReset": "no",
        "TTYVHangup": "no",
        "TTYVTDisallocate": "no",
        "TasksAccounting": "yes",
        "TasksCurrent": "23",
        "TasksMax": "22349",
        "TimeoutAbortUSec": "1min 30s",
        "TimeoutCleanUSec": "infinity",
        "TimeoutStartFailureMode": "terminate",
        "TimeoutStartUSec": "1min 30s",
        "TimeoutStopFailureMode": "terminate",
        "TimeoutStopUSec": "1min 30s",
        "TimerSlackNSec": "50000",
        "Transient": "no",
        "Type": "notify",
        "UID": "[not set]",
        "UMask": "0022",
        "UnitFilePreset": "disabled",
        "UnitFileState": "generated",
        "UtmpMode": "init",
        "WantedBy": "multi-user.target",
        "Wants": "network-online.target",
        "WatchdogSignal": "6",
        "WatchdogTimestampMonotonic": "0",
        "WatchdogUSec": "0"
    }
}
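
Stripped of the role's bookkeeping, the stop-and-disable step above is roughly equivalent to the following task; this is a sketch under that assumption, and the role's actual task additionally handles systemd scope, per-user units, and failed_when:

    - name: Stop and disable the quadlet-generated unit (sketch)
      ansible.builtin.systemd_service:
        name: quadlet-demo-mysql.service
        state: stopped
        enabled: false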

TASK [fedora.linux_system_roles.podman : See if quadlet file exists] ***********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33
Monday 06 January 2025  13:45:36 -0500 (0:00:02.131)       0:03:06.780 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1736189004.5242243,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "ca62b2ad3cc9afb5b5371ebbf797b9bc4fd7edd4",
        "ctime": 1736189004.5302243,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 553648356,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1736189004.2422216,
        "nlink": 1,
        "path": "/etc/containers/systemd/quadlet-demo-mysql.container",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 363,
        "uid": 0,
        "version": "4148627256",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38
Monday 06 January 2025  13:45:36 -0500 (0:00:00.469)       0:03:07.250 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Slurp quadlet file] *******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6
Monday 06 January 2025  13:45:36 -0500 (0:00:00.080)       0:03:07.330 ******** 
ok: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12
Monday 06 January 2025  13:45:37 -0500 (0:00:00.390)       0:03:07.721 ******** 
ok: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44
Monday 06 January 2025  13:45:37 -0500 (0:00:00.067)       0:03:07.788 ******** 
skipping: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Reset raw variable] *******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52
Monday 06 January 2025  13:45:37 -0500 (0:00:00.062)       0:03:07.851 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_raw": null
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Remove quadlet file] ******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42
Monday 06 January 2025  13:45:37 -0500 (0:00:00.054)       0:03:07.906 ******** 
changed: [managed-node3] => {
    "changed": true,
    "path": "/etc/containers/systemd/quadlet-demo-mysql.container",
    "state": "absent"
}
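
The removal above amounts to deleting the quadlet source file that the systemd generator consumes; a minimal equivalent task, using the path and state reported in the result, would be:

    - name: Remove the quadlet file (sketch)
      ansible.builtin.file:
        path: /etc/containers/systemd/quadlet-demo-mysql.container
        state: absent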

TASK [fedora.linux_system_roles.podman : Refresh systemd] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48
Monday 06 January 2025  13:45:37 -0500 (0:00:00.409)       0:03:08.316 ******** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}
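
After a quadlet file is removed, systemd must regenerate its units so the generated service disappears; the refresh above corresponds, roughly, to a daemon reload:

    - name: Reload systemd so the generated unit is dropped (sketch)
      ansible.builtin.systemd_service:
        daemon_reload: true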

TASK [fedora.linux_system_roles.podman : Remove managed resource] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58
Monday 06 January 2025  13:45:38 -0500 (0:00:00.772)       0:03:09.088 ******** 
ok: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Remove volumes] ***********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99
Monday 06 January 2025  13:45:38 -0500 (0:00:00.461)       0:03:09.550 ******** 
skipping: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] *********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116
Monday 06 January 2025  13:45:38 -0500 (0:00:00.058)       0:03:09.608 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_parsed": null
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120
Monday 06 January 2025  13:45:38 -0500 (0:00:00.041)       0:03:09.649 ******** 
changed: [managed-node3] => {
    "changed": true,
    "cmd": [
        "podman",
        "image",
        "prune",
        "--all",
        "-f"
    ],
    "delta": "0:00:00.246812",
    "end": "2025-01-06 13:45:39.510009",
    "rc": 0,
    "start": "2025-01-06 13:45:39.263197"
}

STDOUT:

dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5
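
The prune above removed the now-unreferenced mysql image (its ID is the value printed in STDOUT). The same cleanup could be reproduced ad hoc with the command shown in "cmd"; as a sketch:

    - name: Prune images no longer referenced by any container (sketch)
      ansible.builtin.command: podman image prune --all -f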

TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131
Monday 06 January 2025  13:45:39 -0500 (0:00:00.631)       0:03:10.281 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Monday 06 January 2025  13:45:39 -0500 (0:00:00.104)       0:03:10.386 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Monday 06 January 2025  13:45:39 -0500 (0:00:00.056)       0:03:10.443 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Monday 06 January 2025  13:45:39 -0500 (0:00:00.063)       0:03:10.506 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : For testing and debugging - images] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141
Monday 06 January 2025  13:45:39 -0500 (0:00:00.165)       0:03:10.672 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "podman",
        "images",
        "-n"
    ],
    "delta": "0:00:00.030346",
    "end": "2025-01-06 13:45:40.348365",
    "rc": 0,
    "start": "2025-01-06 13:45:40.318019"
}

TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150
Monday 06 January 2025  13:45:40 -0500 (0:00:00.518)       0:03:11.190 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "podman",
        "volume",
        "ls",
        "-n"
    ],
    "delta": "0:00:00.030946",
    "end": "2025-01-06 13:45:40.935367",
    "rc": 0,
    "start": "2025-01-06 13:45:40.904421"
}

STDOUT:

local       systemd-quadlet-demo-mysql

TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159
Monday 06 January 2025  13:45:41 -0500 (0:00:00.566)       0:03:11.757 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "podman",
        "ps",
        "--noheading"
    ],
    "delta": "0:00:00.030136",
    "end": "2025-01-06 13:45:41.441481",
    "rc": 0,
    "start": "2025-01-06 13:45:41.411345"
}

TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168
Monday 06 January 2025  13:45:41 -0500 (0:00:00.456)       0:03:12.213 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "podman",
        "network",
        "ls",
        "-n",
        "-q"
    ],
    "delta": "0:00:00.030594",
    "end": "2025-01-06 13:45:41.865366",
    "rc": 0,
    "start": "2025-01-06 13:45:41.834772"
}

STDOUT:

podman
systemd-quadlet-demo

TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177
Monday 06 January 2025  13:45:41 -0500 (0:00:00.448)       0:03:12.662 ******** 
ok: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187
Monday 06 January 2025  13:45:42 -0500 (0:00:00.437)       0:03:13.099 ******** 
ok: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : For testing and debugging - services] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
Monday 06 January 2025  13:45:42 -0500 (0:00:00.516)       0:03:13.616 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "services": {
            "NetworkManager-dispatcher.service": {
                "name": "NetworkManager-dispatcher.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "NetworkManager-wait-online.service": {
                "name": "NetworkManager-wait-online.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "NetworkManager.service": {
                "name": "NetworkManager.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "audit-rules.service": {
                "name": "audit-rules.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "auditd.service": {
                "name": "auditd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "auth-rpcgss-module.service": {
                "name": "auth-rpcgss-module.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "autofs.service": {
                "name": "autofs.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "autovt@.service": {
                "name": "autovt@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "alias"
            },
            "blk-availability.service": {
                "name": "blk-availability.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "capsule@.service": {
                "name": "capsule@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "certmonger.service": {
                "name": "certmonger.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "chrony-wait.service": {
                "name": "chrony-wait.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "chronyd-restricted.service": {
                "name": "chronyd-restricted.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "chronyd.service": {
                "name": "chronyd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "cloud-config.service": {
                "name": "cloud-config.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-final.service": {
                "name": "cloud-final.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-init-hotplugd.service": {
                "name": "cloud-init-hotplugd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "cloud-init-local.service": {
                "name": "cloud-init-local.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-init.service": {
                "name": "cloud-init.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "console-getty.service": {
                "name": "console-getty.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "container-getty@.service": {
                "name": "container-getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "crond.service": {
                "name": "crond.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "dbus-broker.service": {
                "name": "dbus-broker.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "dbus-org.fedoraproject.FirewallD1.service": {
                "name": "dbus-org.fedoraproject.FirewallD1.service",
                "source": "systemd",
                "state": "active",
                "status": "alias"
            },
            "dbus-org.freedesktop.hostname1.service": {
                "name": "dbus-org.freedesktop.hostname1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus-org.freedesktop.locale1.service": {
                "name": "dbus-org.freedesktop.locale1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus-org.freedesktop.login1.service": {
                "name": "dbus-org.freedesktop.login1.service",
                "source": "systemd",
                "state": "active",
                "status": "alias"
            },
            "dbus-org.freedesktop.nm-dispatcher.service": {
                "name": "dbus-org.freedesktop.nm-dispatcher.service",
                "source": "systemd",
                "state": "active",
                "status": "alias"
            },
            "dbus-org.freedesktop.timedate1.service": {
                "name": "dbus-org.freedesktop.timedate1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus.service": {
                "name": "dbus.service",
                "source": "systemd",
                "state": "active",
                "status": "alias"
            },
            "debug-shell.service": {
                "name": "debug-shell.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "dhcpcd.service": {
                "name": "dhcpcd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "dhcpcd@.service": {
                "name": "dhcpcd@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "disabled"
            },
            "display-manager.service": {
                "name": "display-manager.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "dm-event.service": {
                "name": "dm-event.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dnf-makecache.service": {
                "name": "dnf-makecache.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dnf-system-upgrade-cleanup.service": {
                "name": "dnf-system-upgrade-cleanup.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "dnf-system-upgrade.service": {
                "name": "dnf-system-upgrade.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "dracut-cmdline.service": {
                "name": "dracut-cmdline.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-initqueue.service": {
                "name": "dracut-initqueue.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-mount.service": {
                "name": "dracut-mount.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-mount.service": {
                "name": "dracut-pre-mount.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-pivot.service": {
                "name": "dracut-pre-pivot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-trigger.service": {
                "name": "dracut-pre-trigger.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-udev.service": {
                "name": "dracut-pre-udev.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-shutdown-onfailure.service": {
                "name": "dracut-shutdown-onfailure.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-shutdown.service": {
                "name": "dracut-shutdown.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "ebtables.service": {
                "name": "ebtables.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "emergency.service": {
                "name": "emergency.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "fips-crypto-policy-overlay.service": {
                "name": "fips-crypto-policy-overlay.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "firewalld.service": {
                "name": "firewalld.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "fsidd.service": {
                "name": "fsidd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "fstrim.service": {
                "name": "fstrim.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "getty@.service": {
                "name": "getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "enabled"
            },
            "getty@tty1.service": {
                "name": "getty@tty1.service",
                "source": "systemd",
                "state": "running",
                "status": "active"
            },
            "grub-boot-indeterminate.service": {
                "name": "grub-boot-indeterminate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "grub2-systemd-integration.service": {
                "name": "grub2-systemd-integration.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "gssproxy.service": {
                "name": "gssproxy.service",
                "source": "systemd",
                "state": "running",
                "status": "disabled"
            },
            "hv_kvp_daemon.service": {
                "name": "hv_kvp_daemon.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "initrd-cleanup.service": {
                "name": "initrd-cleanup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-parse-etc.service": {
                "name": "initrd-parse-etc.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-switch-root.service": {
                "name": "initrd-switch-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-udevadm-cleanup-db.service": {
                "name": "initrd-udevadm-cleanup-db.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "ip6tables.service": {
                "name": "ip6tables.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "ipset.service": {
                "name": "ipset.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "iptables.service": {
                "name": "iptables.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "irqbalance.service": {
                "name": "irqbalance.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "kdump.service": {
                "name": "kdump.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "kmod-static-nodes.service": {
                "name": "kmod-static-nodes.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "kvm_stat.service": {
                "name": "kvm_stat.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "ldconfig.service": {
                "name": "ldconfig.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "logrotate.service": {
                "name": "logrotate.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "lvm-devices-import.service": {
                "name": "lvm-devices-import.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "lvm2-lvmpolld.service": {
                "name": "lvm2-lvmpolld.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "lvm2-monitor.service": {
                "name": "lvm2-monitor.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "man-db-cache-update.service": {
                "name": "man-db-cache-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "man-db-restart-cache-update.service": {
                "name": "man-db-restart-cache-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "microcode.service": {
                "name": "microcode.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "modprobe@.service": {
                "name": "modprobe@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "modprobe@configfs.service": {
                "name": "modprobe@configfs.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@dm_mod.service": {
                "name": "modprobe@dm_mod.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@drm.service": {
                "name": "modprobe@drm.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@efi_pstore.service": {
                "name": "modprobe@efi_pstore.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@fuse.service": {
                "name": "modprobe@fuse.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@loop.service": {
                "name": "modprobe@loop.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "netavark-dhcp-proxy.service": {
                "name": "netavark-dhcp-proxy.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "netavark-firewalld-reload.service": {
                "name": "netavark-firewalld-reload.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "network.service": {
                "name": "network.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "nfs-blkmap.service": {
                "name": "nfs-blkmap.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nfs-idmapd.service": {
                "name": "nfs-idmapd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfs-mountd.service": {
                "name": "nfs-mountd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfs-server.service": {
                "name": "nfs-server.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "nfs-utils.service": {
                "name": "nfs-utils.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfsdcld.service": {
                "name": "nfsdcld.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nftables.service": {
                "name": "nftables.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nis-domainname.service": {
                "name": "nis-domainname.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nm-priv-helper.service": {
                "name": "nm-priv-helper.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "ntpd.service": {
                "name": "ntpd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "ntpdate.service": {
                "name": "ntpdate.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "pam_namespace.service": {
                "name": "pam_namespace.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "pcscd.service": {
                "name": "pcscd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "indirect"
            },
            "plymouth-quit-wait.service": {
                "name": "plymouth-quit-wait.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "plymouth-start.service": {
                "name": "plymouth-start.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "podman-auto-update.service": {
                "name": "podman-auto-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "podman-clean-transient.service": {
                "name": "podman-clean-transient.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "podman-kube@.service": {
                "name": "podman-kube@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "disabled"
            },
            "podman-restart.service": {
                "name": "podman-restart.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "podman.service": {
                "name": "podman.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "polkit.service": {
                "name": "polkit.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "qemu-guest-agent.service": {
                "name": "qemu-guest-agent.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "quadlet-demo-mysql-volume.service": {
                "name": "quadlet-demo-mysql-volume.service",
                "source": "systemd",
                "state": "stopped",
                "status": "generated"
            },
            "quadlet-demo-network.service": {
                "name": "quadlet-demo-network.service",
                "source": "systemd",
                "state": "stopped",
                "status": "generated"
            },
            "quotaon-root.service": {
                "name": "quotaon-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "quotaon@.service": {
                "name": "quotaon@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "rc-local.service": {
                "name": "rc-local.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rescue.service": {
                "name": "rescue.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "restraintd.service": {
                "name": "restraintd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rngd.service": {
                "name": "rngd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rpc-gssd.service": {
                "name": "rpc-gssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-statd-notify.service": {
                "name": "rpc-statd-notify.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-statd.service": {
                "name": "rpc-statd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-svcgssd.service": {
                "name": "rpc-svcgssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "rpcbind.service": {
                "name": "rpcbind.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rpmdb-migrate.service": {
                "name": "rpmdb-migrate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "rpmdb-rebuild.service": {
                "name": "rpmdb-rebuild.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "rsyslog.service": {
                "name": "rsyslog.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "selinux-autorelabel-mark.service": {
                "name": "selinux-autorelabel-mark.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "selinux-autorelabel.service": {
                "name": "selinux-autorelabel.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "selinux-check-proper-disable.service": {
                "name": "selinux-check-proper-disable.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "serial-getty@.service": {
                "name": "serial-getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "indirect"
            },
            "serial-getty@ttyS0.service": {
                "name": "serial-getty@ttyS0.service",
                "source": "systemd",
                "state": "running",
                "status": "active"
            },
            "sntp.service": {
                "name": "sntp.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "ssh-host-keys-migration.service": {
                "name": "ssh-host-keys-migration.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "sshd-keygen.service": {
                "name": "sshd-keygen.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "sshd-keygen@.service": {
                "name": "sshd-keygen@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "disabled"
            },
            "sshd-keygen@ecdsa.service": {
                "name": "sshd-keygen@ecdsa.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "sshd-keygen@ed25519.service": {
                "name": "sshd-keygen@ed25519.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "sshd-keygen@rsa.service": {
                "name": "sshd-keygen@rsa.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "sshd.service": {
                "name": "sshd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "sshd@.service": {
                "name": "sshd@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "sssd-autofs.service": {
                "name": "sssd-autofs.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-kcm.service": {
                "name": "sssd-kcm.service",
                "source": "systemd",
                "state": "stopped",
                "status": "indirect"
            },
            "sssd-nss.service": {
                "name": "sssd-nss.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-pac.service": {
                "name": "sssd-pac.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-pam.service": {
                "name": "sssd-pam.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-ssh.service": {
                "name": "sssd-ssh.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-sudo.service": {
                "name": "sssd-sudo.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd.service": {
                "name": "sssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "syslog.service": {
                "name": "syslog.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "system-update-cleanup.service": {
                "name": "system-update-cleanup.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-ask-password-console.service": {
                "name": "systemd-ask-password-console.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-ask-password-wall.service": {
                "name": "systemd-ask-password-wall.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-backlight@.service": {
                "name": "systemd-backlight@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-battery-check.service": {
                "name": "systemd-battery-check.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-binfmt.service": {
                "name": "systemd-binfmt.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-bless-boot.service": {
                "name": "systemd-bless-boot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-boot-check-no-failures.service": {
                "name": "systemd-boot-check-no-failures.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-boot-random-seed.service": {
                "name": "systemd-boot-random-seed.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-boot-update.service": {
                "name": "systemd-boot-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-bootctl@.service": {
                "name": "systemd-bootctl@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-confext.service": {
                "name": "systemd-confext.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-coredump@.service": {
                "name": "systemd-coredump@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-creds@.service": {
                "name": "systemd-creds@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-exit.service": {
                "name": "systemd-exit.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-firstboot.service": {
                "name": "systemd-firstboot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-fsck-root.service": {
                "name": "systemd-fsck-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-fsck@.service": {
                "name": "systemd-fsck@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-growfs-root.service": {
                "name": "systemd-growfs-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-growfs@.service": {
                "name": "systemd-growfs@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-halt.service": {
                "name": "systemd-halt.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-hibernate-clear.service": {
                "name": "systemd-hibernate-clear.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hibernate-resume.service": {
                "name": "systemd-hibernate-resume.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hibernate.service": {
                "name": "systemd-hibernate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-hostnamed.service": {
                "name": "systemd-hostnamed.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hwdb-update.service": {
                "name": "systemd-hwdb-update.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hybrid-sleep.service": {
                "name": "systemd-hybrid-sleep.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-initctl.service": {
                "name": "systemd-initctl.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journal-catalog-update.service": {
                "name": "systemd-journal-catalog-update.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journal-flush.service": {
                "name": "systemd-journal-flush.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journald-sync@.service": {
                "name": "systemd-journald-sync@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-journald.service": {
                "name": "systemd-journald.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-journald@.service": {
                "name": "systemd-journald@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-kexec.service": {
                "name": "systemd-kexec.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-localed.service": {
                "name": "systemd-localed.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-logind.service": {
                "name": "systemd-logind.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-machine-id-commit.service": {
                "name": "systemd-machine-id-commit.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-modules-load.service": {
                "name": "systemd-modules-load.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-network-generator.service": {
                "name": "systemd-network-generator.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-networkd-wait-online.service": {
                "name": "systemd-networkd-wait-online.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "systemd-oomd.service": {
                "name": "systemd-oomd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "systemd-pcrextend@.service": {
                "name": "systemd-pcrextend@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-pcrfs-root.service": {
                "name": "systemd-pcrfs-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-pcrfs@.service": {
                "name": "systemd-pcrfs@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-pcrlock-file-system.service": {
                "name": "systemd-pcrlock-file-system.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-firmware-code.service": {
                "name": "systemd-pcrlock-firmware-code.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-firmware-config.service": {
                "name": "systemd-pcrlock-firmware-config.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-machine-id.service": {
                "name": "systemd-pcrlock-machine-id.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-make-policy.service": {
                "name": "systemd-pcrlock-make-policy.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-secureboot-authority.service": {
                "name": "systemd-pcrlock-secureboot-authority.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-secureboot-policy.service": {
                "name": "systemd-pcrlock-secureboot-policy.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock@.service": {
                "name": "systemd-pcrlock@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-pcrmachine.service": {
                "name": "systemd-pcrmachine.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-pcrphase-initrd.service": {
                "name": "systemd-pcrphase-initrd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-pcrphase-sysinit.service": {
                "name": "systemd-pcrphase-sysinit.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-pcrphase.service": {
                "name": "systemd-pcrphase.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-poweroff.service": {
                "name": "systemd-poweroff.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-pstore.service": {
                "name": "systemd-pstore.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-quotacheck-root.service": {
                "name": "systemd-quotacheck-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-quotacheck@.service": {
                "name": "systemd-quotacheck@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-random-seed.service": {
                "name": "systemd-random-seed.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-reboot.service": {
                "name": "systemd-reboot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-remount-fs.service": {
                "name": "systemd-remount-fs.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled-runtime"
            },
            "systemd-repart.service": {
                "name": "systemd-repart.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-rfkill.service": {
                "name": "systemd-rfkill.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-soft-reboot.service": {
                "name": "systemd-soft-reboot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-suspend-then-hibernate.service": {
                "name": "systemd-suspend-then-hibernate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-suspend.service": {
                "name": "systemd-suspend.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-sysctl.service": {
                "name": "systemd-sysctl.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-sysext.service": {
                "name": "systemd-sysext.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-sysext@.service": {
                "name": "systemd-sysext@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-sysupdate-reboot.service": {
                "name": "systemd-sysupdate-reboot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "systemd-sysupdate.service": {
                "name": "systemd-sysupdate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "systemd-sysusers.service": {
                "name": "systemd-sysusers.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-timedated.service": {
                "name": "systemd-timedated.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-timesyncd.service": {
                "name": "systemd-timesyncd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "systemd-tmpfiles-clean.service": {
                "name": "systemd-tmpfiles-clean.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup-dev-early.service": {
                "name": "systemd-tmpfiles-setup-dev-early.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup-dev.service": {
                "name": "systemd-tmpfiles-setup-dev.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup.service": {
                "name": "systemd-tmpfiles-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tpm2-setup-early.service": {
                "name": "systemd-tpm2-setup-early.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tpm2-setup.service": {
                "name": "systemd-tpm2-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udev-load-credentials.service": {
                "name": "systemd-udev-load-credentials.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "systemd-udev-settle.service": {
                "name": "systemd-udev-settle.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udev-trigger.service": {
                "name": "systemd-udev-trigger.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udevd.service": {
                "name": "systemd-udevd.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-update-done.service": {
                "name": "systemd-update-done.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-update-utmp-runlevel.service": {
                "name": "systemd-update-utmp-runlevel.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-update-utmp.service": {
                "name": "systemd-update-utmp.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-user-sessions.service": {
                "name": "systemd-user-sessions.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-vconsole-setup.service": {
                "name": "systemd-vconsole-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-volatile-root.service": {
                "name": "systemd-volatile-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "user-runtime-dir@.service": {
                "name": "user-runtime-dir@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "user-runtime-dir@0.service": {
                "name": "user-runtime-dir@0.service",
                "source": "systemd",
                "state": "stopped",
                "status": "active"
            },
            "user@.service": {
                "name": "user@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "user@0.service": {
                "name": "user@0.service",
                "source": "systemd",
                "state": "running",
                "status": "active"
            },
            "ypbind.service": {
                "name": "ypbind.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            }
        }
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
Monday 06 January 2025  13:45:44 -0500 (0:00:01.928)       0:03:15.544 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_state != \"absent\"",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Monday 06 January 2025  13:45:44 -0500 (0:00:00.059)       0:03:15.603 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_file_src": "quadlet-demo-mysql.volume",
        "__podman_quadlet_spec": {},
        "__podman_quadlet_str": "[Volume]",
        "__podman_quadlet_template_src": ""
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Monday 06 January 2025  13:45:44 -0500 (0:00:00.077)       0:03:15.681 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_continue_if_pull_fails": false,
        "__podman_pull_image": true,
        "__podman_state": "absent",
        "__podman_systemd_unit_scope": "",
        "__podman_user": "root"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Monday 06 January 2025  13:45:45 -0500 (0:00:00.074)       0:03:15.756 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_quadlet_file_src",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Monday 06 January 2025  13:45:45 -0500 (0:00:00.059)       0:03:15.816 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_name": "quadlet-demo-mysql",
        "__podman_quadlet_type": "volume",
        "__podman_rootless": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Monday 06 January 2025  13:45:45 -0500 (0:00:00.084)       0:03:15.901 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 06 January 2025  13:45:45 -0500 (0:00:00.114)       0:03:16.015 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 06 January 2025  13:45:45 -0500 (0:00:00.063)       0:03:16.078 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Debug] ********************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 06 January 2025  13:45:45 -0500 (0:00:00.149)       0:03:16.228 ******** 
ok: [managed-node3] => {}

MSG:

item {
    "file_src": "quadlet-demo-mysql.volume",
    "state": "absent"
}

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:20
Monday 06 January 2025  13:45:45 -0500 (0:00:00.065)       0:03:16.293 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_group": "0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35
Monday 06 January 2025  13:45:45 -0500 (0:00:00.112)       0:03:16.405 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1736188868.5129364,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 32,
        "charset": "binary",
        "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97",
        "ctime": 1736188848.6497798,
        "dev": 51714,
        "device_type": 0,
        "executable": true,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 9125782,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "application/x-pie-executable",
        "mode": "0755",
        "mtime": 1730678400.0,
        "nlink": 1,
        "path": "/usr/bin/getsubids",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 15744,
        "uid": 0,
        "version": "4070602005",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": true,
        "xoth": true,
        "xusr": true
    }
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:46
Monday 06 January 2025  13:45:46 -0500 (0:00:00.438)       0:03:16.844 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:51
Monday 06 January 2025  13:45:46 -0500 (0:00:00.061)       0:03:16.905 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:56
Monday 06 January 2025  13:45:46 -0500 (0:00:00.056)       0:03:16.962 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:69
Monday 06 January 2025  13:45:46 -0500 (0:00:00.063)       0:03:17.026 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74
Monday 06 January 2025  13:45:46 -0500 (0:00:00.061)       0:03:17.087 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79
Monday 06 January 2025  13:45:46 -0500 (0:00:00.064)       0:03:17.151 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Monday 06 January 2025  13:45:46 -0500 (0:00:00.064)       0:03:17.216 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:96
Monday 06 January 2025  13:45:46 -0500 (0:00:00.040)       0:03:17.256 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}
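
All of the subuid/subgid tasks above are skipped because __podman_user is root. For a rootless user the role instead verifies that the user has ID ranges allocated, using the getsubids binary found by the stat earlier. A rough, illustrative sketch of that check ("someuser" is a placeholder; the role's actual task names and error handling differ):

    - name: Check subuid ranges for a rootless user (illustrative)
      ansible.builtin.command: getsubids someuser
      register: subuid_check
      changed_when: false
      failed_when: false

    - name: Check subgid ranges for the same user (illustrative)
      ansible.builtin.command: getsubids -g someuser
      register: subgid_check
      changed_when: false
      failed_when: false

    - name: Fail if the user has no ranges allocated (illustrative)
      ansible.builtin.fail:
        msg: someuser needs entries in /etc/subuid and /etc/subgid
      when: subuid_check.rc != 0 or subgid_check.rc != 0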

TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Monday 06 January 2025  13:45:46 -0500 (0:00:00.043)       0:03:17.300 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_activate_systemd_unit": true,
        "__podman_images_found": [],
        "__podman_kube_yamls_raw": "",
        "__podman_service_name": "quadlet-demo-mysql-volume.service",
        "__podman_systemd_scope": "system",
        "__podman_user_home_dir": "/root",
        "__podman_xdg_runtime_dir": "/run/user/0"
    },
    "changed": false
}
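
The __podman_service_name fact above follows Quadlet's unit-naming rules: a foo.volume file generates foo-volume.service, foo.network generates foo-network.service, while foo.container (and foo.kube) generate plain foo.service. A small illustrative sketch of that mapping, not the role's actual code:

    - name: Derive the generated unit name from a quadlet name and type (illustrative)
      vars:
        quadlet_name: quadlet-demo-mysql
        quadlet_type: volume
      ansible.builtin.set_fact:
        generated_unit: "{{ quadlet_name ~ ('' if quadlet_type in ['container', 'kube'] else '-' ~ quadlet_type) ~ '.service' }}"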

TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Monday 06 January 2025  13:45:46 -0500 (0:00:00.069)       0:03:17.370 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_path": "/etc/containers/systemd"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Monday 06 January 2025  13:45:46 -0500 (0:00:00.052)       0:03:17.422 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_state != \"absent\"",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Monday 06 January 2025  13:45:46 -0500 (0:00:00.039)       0:03:17.462 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_images": [],
        "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.volume",
        "__podman_volumes": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Monday 06 January 2025  13:45:46 -0500 (0:00:00.079)       0:03:17.542 ******** 
ok: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Monday 06 January 2025  13:45:46 -0500 (0:00:00.054)       0:03:17.596 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4
Monday 06 January 2025  13:45:47 -0500 (0:00:00.164)       0:03:17.760 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Stop and disable service] *************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
Monday 06 January 2025  13:45:47 -0500 (0:00:00.038)       0:03:17.798 ******** 
changed: [managed-node3] => {
    "changed": true,
    "enabled": false,
    "failed_when_result": false,
    "name": "quadlet-demo-mysql-volume.service",
    "state": "stopped",
    "status": {
        "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0",
        "ActiveEnterTimestamp": "Mon 2025-01-06 13:43:13 EST",
        "ActiveEnterTimestampMonotonic": "434375563",
        "ActiveExitTimestampMonotonic": "0",
        "ActiveState": "active",
        "After": "-.mount system.slice sysinit.target network-online.target systemd-journald.socket basic.target",
        "AllowIsolate": "no",
        "AssertResult": "yes",
        "AssertTimestamp": "Mon 2025-01-06 13:43:13 EST",
        "AssertTimestampMonotonic": "434326935",
        "Before": "shutdown.target",
        "BlockIOAccounting": "no",
        "BlockIOWeight": "[not set]",
        "CPUAccounting": "yes",
        "CPUAffinityFromNUMA": "no",
        "CPUQuotaPerSecUSec": "infinity",
        "CPUQuotaPeriodUSec": "infinity",
        "CPUSchedulingPolicy": "0",
        "CPUSchedulingPriority": "0",
        "CPUSchedulingResetOnFork": "no",
        "CPUShares": "[not set]",
        "CPUUsageNSec": "[not set]",
        "CPUWeight": "[not set]",
        "CacheDirectoryMode": "0755",
        "CanFreeze": "yes",
        "CanIsolate": "no",
        "CanReload": "no",
        "CanStart": "yes",
        "CanStop": "yes",
        "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
        "CleanResult": "success",
        "CollectMode": "inactive",
        "ConditionResult": "yes",
        "ConditionTimestamp": "Mon 2025-01-06 13:43:13 EST",
        "ConditionTimestampMonotonic": "434326932",
        "ConfigurationDirectoryMode": "0755",
        "Conflicts": "shutdown.target",
        "ControlGroupId": "0",
        "ControlPID": "0",
        "CoredumpFilter": "0x33",
        "CoredumpReceive": "no",
        "DefaultDependencies": "yes",
        "DefaultMemoryLow": "0",
        "DefaultMemoryMin": "0",
        "DefaultStartupMemoryLow": "0",
        "Delegate": "no",
        "Description": "quadlet-demo-mysql-volume.service",
        "DevicePolicy": "auto",
        "DynamicUser": "no",
        "EffectiveMemoryHigh": "3698229248",
        "EffectiveMemoryMax": "3698229248",
        "EffectiveTasksMax": "22349",
        "ExecMainCode": "1",
        "ExecMainExitTimestamp": "Mon 2025-01-06 13:43:13 EST",
        "ExecMainExitTimestampMonotonic": "434375409",
        "ExecMainHandoffTimestamp": "Mon 2025-01-06 13:43:13 EST",
        "ExecMainHandoffTimestampMonotonic": "434336430",
        "ExecMainPID": "30271",
        "ExecMainStartTimestamp": "Mon 2025-01-06 13:43:13 EST",
        "ExecMainStartTimestampMonotonic": "434327704",
        "ExecMainStatus": "0",
        "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-demo-mysql ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-demo-mysql ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExitType": "main",
        "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "FailureAction": "none",
        "FileDescriptorStoreMax": "0",
        "FileDescriptorStorePreserve": "restart",
        "FinalKillSignal": "9",
        "FragmentPath": "/run/systemd/generator/quadlet-demo-mysql-volume.service",
        "FreezerState": "running",
        "GID": "[not set]",
        "GuessMainPID": "yes",
        "IOAccounting": "no",
        "IOReadBytes": "[not set]",
        "IOReadOperations": "[not set]",
        "IOSchedulingClass": "2",
        "IOSchedulingPriority": "4",
        "IOWeight": "[not set]",
        "IOWriteBytes": "[not set]",
        "IOWriteOperations": "[not set]",
        "IPAccounting": "no",
        "IPEgressBytes": "[no data]",
        "IPEgressPackets": "[no data]",
        "IPIngressBytes": "[no data]",
        "IPIngressPackets": "[no data]",
        "Id": "quadlet-demo-mysql-volume.service",
        "IgnoreOnIsolate": "no",
        "IgnoreSIGPIPE": "yes",
        "InactiveEnterTimestampMonotonic": "0",
        "InactiveExitTimestamp": "Mon 2025-01-06 13:43:13 EST",
        "InactiveExitTimestampMonotonic": "434328176",
        "InvocationID": "0ae093529fc94da59c3dd33e12d0e1be",
        "JobRunningTimeoutUSec": "infinity",
        "JobTimeoutAction": "none",
        "JobTimeoutUSec": "infinity",
        "KeyringMode": "private",
        "KillMode": "control-group",
        "KillSignal": "15",
        "LimitAS": "infinity",
        "LimitASSoft": "infinity",
        "LimitCORE": "infinity",
        "LimitCORESoft": "infinity",
        "LimitCPU": "infinity",
        "LimitCPUSoft": "infinity",
        "LimitDATA": "infinity",
        "LimitDATASoft": "infinity",
        "LimitFSIZE": "infinity",
        "LimitFSIZESoft": "infinity",
        "LimitLOCKS": "infinity",
        "LimitLOCKSSoft": "infinity",
        "LimitMEMLOCK": "8388608",
        "LimitMEMLOCKSoft": "8388608",
        "LimitMSGQUEUE": "819200",
        "LimitMSGQUEUESoft": "819200",
        "LimitNICE": "0",
        "LimitNICESoft": "0",
        "LimitNOFILE": "524288",
        "LimitNOFILESoft": "1024",
        "LimitNPROC": "13968",
        "LimitNPROCSoft": "13968",
        "LimitRSS": "infinity",
        "LimitRSSSoft": "infinity",
        "LimitRTPRIO": "0",
        "LimitRTPRIOSoft": "0",
        "LimitRTTIME": "infinity",
        "LimitRTTIMESoft": "infinity",
        "LimitSIGPENDING": "13968",
        "LimitSIGPENDINGSoft": "13968",
        "LimitSTACK": "infinity",
        "LimitSTACKSoft": "8388608",
        "LoadState": "loaded",
        "LockPersonality": "no",
        "LogLevelMax": "-1",
        "LogRateLimitBurst": "0",
        "LogRateLimitIntervalUSec": "0",
        "LogsDirectoryMode": "0755",
        "MainPID": "0",
        "ManagedOOMMemoryPressure": "auto",
        "ManagedOOMMemoryPressureLimit": "0",
        "ManagedOOMPreference": "none",
        "ManagedOOMSwap": "auto",
        "MemoryAccounting": "yes",
        "MemoryAvailable": "3212238848",
        "MemoryCurrent": "[not set]",
        "MemoryDenyWriteExecute": "no",
        "MemoryHigh": "infinity",
        "MemoryKSM": "no",
        "MemoryLimit": "infinity",
        "MemoryLow": "0",
        "MemoryMax": "infinity",
        "MemoryMin": "0",
        "MemoryPeak": "[not set]",
        "MemoryPressureThresholdUSec": "200ms",
        "MemoryPressureWatch": "auto",
        "MemorySwapCurrent": "[not set]",
        "MemorySwapMax": "infinity",
        "MemorySwapPeak": "[not set]",
        "MemoryZSwapCurrent": "[not set]",
        "MemoryZSwapMax": "infinity",
        "MemoryZSwapWriteback": "yes",
        "MountAPIVFS": "no",
        "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "NFileDescriptorStore": "0",
        "NRestarts": "0",
        "NUMAPolicy": "n/a",
        "Names": "quadlet-demo-mysql-volume.service",
        "NeedDaemonReload": "no",
        "Nice": "0",
        "NoNewPrivileges": "no",
        "NonBlocking": "no",
        "NotifyAccess": "none",
        "OOMPolicy": "stop",
        "OOMScoreAdjust": "0",
        "OnFailureJobMode": "replace",
        "OnSuccessJobMode": "fail",
        "Perpetual": "no",
        "PrivateDevices": "no",
        "PrivateIPC": "no",
        "PrivateMounts": "no",
        "PrivateNetwork": "no",
        "PrivateTmp": "no",
        "PrivateUsers": "no",
        "ProcSubset": "all",
        "ProtectClock": "no",
        "ProtectControlGroups": "no",
        "ProtectHome": "no",
        "ProtectHostname": "no",
        "ProtectKernelLogs": "no",
        "ProtectKernelModules": "no",
        "ProtectKernelTunables": "no",
        "ProtectProc": "default",
        "ProtectSystem": "no",
        "RefuseManualStart": "no",
        "RefuseManualStop": "no",
        "ReloadResult": "success",
        "ReloadSignal": "1",
        "RemainAfterExit": "yes",
        "RemoveIPC": "no",
        "Requires": "-.mount system.slice sysinit.target",
        "RequiresMountsFor": "/run/containers",
        "Restart": "no",
        "RestartKillSignal": "15",
        "RestartMaxDelayUSec": "infinity",
        "RestartMode": "normal",
        "RestartSteps": "0",
        "RestartUSec": "100ms",
        "RestartUSecNext": "100ms",
        "RestrictNamespaces": "no",
        "RestrictRealtime": "no",
        "RestrictSUIDSGID": "no",
        "Result": "success",
        "RootDirectoryStartOnly": "no",
        "RootEphemeral": "no",
        "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "RuntimeDirectoryMode": "0755",
        "RuntimeDirectoryPreserve": "no",
        "RuntimeMaxUSec": "infinity",
        "RuntimeRandomizedExtraUSec": "0",
        "SameProcessGroup": "no",
        "SecureBits": "0",
        "SendSIGHUP": "no",
        "SendSIGKILL": "yes",
        "SetLoginEnvironment": "no",
        "Slice": "system.slice",
        "SourcePath": "/etc/containers/systemd/quadlet-demo-mysql.volume",
        "StandardError": "inherit",
        "StandardInput": "null",
        "StandardOutput": "journal",
        "StartLimitAction": "none",
        "StartLimitBurst": "5",
        "StartLimitIntervalUSec": "10s",
        "StartupBlockIOWeight": "[not set]",
        "StartupCPUShares": "[not set]",
        "StartupCPUWeight": "[not set]",
        "StartupIOWeight": "[not set]",
        "StartupMemoryHigh": "infinity",
        "StartupMemoryLow": "0",
        "StartupMemoryMax": "infinity",
        "StartupMemorySwapMax": "infinity",
        "StartupMemoryZSwapMax": "infinity",
        "StateChangeTimestamp": "Mon 2025-01-06 13:43:13 EST",
        "StateChangeTimestampMonotonic": "434375563",
        "StateDirectoryMode": "0755",
        "StatusErrno": "0",
        "StopWhenUnneeded": "no",
        "SubState": "exited",
        "SuccessAction": "none",
        "SurviveFinalKillSignal": "no",
        "SyslogFacility": "3",
        "SyslogIdentifier": "quadlet-demo-mysql-volume",
        "SyslogLevel": "6",
        "SyslogLevelPrefix": "yes",
        "SyslogPriority": "30",
        "SystemCallErrorNumber": "2147483646",
        "TTYReset": "no",
        "TTYVHangup": "no",
        "TTYVTDisallocate": "no",
        "TasksAccounting": "yes",
        "TasksCurrent": "[not set]",
        "TasksMax": "22349",
        "TimeoutAbortUSec": "1min 30s",
        "TimeoutCleanUSec": "infinity",
        "TimeoutStartFailureMode": "terminate",
        "TimeoutStartUSec": "infinity",
        "TimeoutStopFailureMode": "terminate",
        "TimeoutStopUSec": "1min 30s",
        "TimerSlackNSec": "50000",
        "Transient": "no",
        "Type": "oneshot",
        "UID": "[not set]",
        "UMask": "0022",
        "UnitFilePreset": "disabled",
        "UnitFileState": "generated",
        "UtmpMode": "init",
        "Wants": "network-online.target",
        "WatchdogSignal": "6",
        "WatchdogTimestampMonotonic": "0",
        "WatchdogUSec": "0"
    }
}
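
The status dump above describes the unit that Quadlet generated from the .volume file: FragmentPath under /run/systemd/generator, SourcePath pointing at /etc/containers/systemd/quadlet-demo-mysql.volume, and an ExecStart of "podman volume create --ignore systemd-quadlet-demo-mysql" wrapped in a oneshot service with RemainAfterExit=yes. The stop/disable step is roughly equivalent to the sketch below; note that the role tolerates failure here (failed_when_result: false above), since generated units cannot actually be enabled or disabled:

    - name: Stop and disable the generated quadlet unit (illustrative)
      ansible.builtin.systemd:
        name: quadlet-demo-mysql-volume.service
        state: stopped
        enabled: false
      failed_when: false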

TASK [fedora.linux_system_roles.podman : See if quadlet file exists] ***********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33
Monday 06 January 2025  13:45:47 -0500 (0:00:00.802)       0:03:18.601 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1736188991.720099,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "585f8cbdf0ec73000f9227dcffbef71e9552ea4a",
        "ctime": 1736188991.726099,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 209715401,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1736188991.404096,
        "nlink": 1,
        "path": "/etc/containers/systemd/quadlet-demo-mysql.volume",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 9,
        "uid": 0,
        "version": "3349639187",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
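
The stat above (9 bytes, us-ascii, text/plain) is consistent with a minimal volume quadlet containing nothing but a [Volume] section header; the exact contents are not shown in this log. Purely for illustration, deploying such a file would look roughly like this (the test actually copies it from the file named in file_src):

    - name: Illustrative only - a minimal .volume quadlet
      ansible.builtin.copy:
        dest: /etc/containers/systemd/quadlet-demo-mysql.volume
        content: |
          [Volume]
        mode: "0644"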

TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38
Monday 06 January 2025  13:45:48 -0500 (0:00:00.426)       0:03:19.028 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Slurp quadlet file] *******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6
Monday 06 January 2025  13:45:48 -0500 (0:00:00.090)       0:03:19.118 ******** 
ok: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12
Monday 06 January 2025  13:45:48 -0500 (0:00:00.394)       0:03:19.512 ******** 
ok: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44
Monday 06 January 2025  13:45:48 -0500 (0:00:00.070)       0:03:19.583 ******** 
skipping: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Reset raw variable] *******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52
Monday 06 January 2025  13:45:48 -0500 (0:00:00.047)       0:03:19.631 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_raw": null
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Remove quadlet file] ******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42
Monday 06 January 2025  13:45:48 -0500 (0:00:00.041)       0:03:19.672 ******** 
changed: [managed-node3] => {
    "changed": true,
    "path": "/etc/containers/systemd/quadlet-demo-mysql.volume",
    "state": "absent"
}

TASK [fedora.linux_system_roles.podman : Refresh systemd] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48
Monday 06 January 2025  13:45:49 -0500 (0:00:00.377)       0:03:20.050 ******** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}
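
The null name and empty status above are consistent with a daemon_reload-only call to the systemd module. Taken together, removing the quadlet file and refreshing systemd correspond roughly to the following sketch; after the reload, the generated quadlet-demo-mysql-volume.service unit disappears:

    - name: Remove the quadlet file (illustrative)
      ansible.builtin.file:
        path: /etc/containers/systemd/quadlet-demo-mysql.volume
        state: absent

    - name: Ask systemd to re-run its generators (illustrative)
      ansible.builtin.systemd:
        daemon_reload: true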

TASK [fedora.linux_system_roles.podman : Remove managed resource] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58
Monday 06 January 2025  13:45:50 -0500 (0:00:00.715)       0:03:20.765 ******** 
changed: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": true
}
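
The "Remove managed resource" output is censored by no_log, but for a volume quadlet with state: absent the managed resource is presumably the volume the generated unit created, systemd-quadlet-demo-mysql (per the ExecStart shown earlier). An assumed sketch using the containers.podman collection; this is not necessarily the role's exact implementation:

    - name: Remove the volume created by the quadlet unit (assumed)
      containers.podman.podman_volume:
        name: systemd-quadlet-demo-mysql
        state: absent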

TASK [fedora.linux_system_roles.podman : Remove volumes] ***********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99
Monday 06 January 2025  13:45:50 -0500 (0:00:00.428)       0:03:21.194 ******** 
skipping: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] *********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116
Monday 06 January 2025  13:45:50 -0500 (0:00:00.046)       0:03:21.240 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_parsed": null
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120
Monday 06 January 2025  13:45:50 -0500 (0:00:00.037)       0:03:21.278 ******** 
changed: [managed-node3] => {
    "changed": true,
    "cmd": [
        "podman",
        "image",
        "prune",
        "--all",
        "-f"
    ],
    "delta": "0:00:00.028482",
    "end": "2025-01-06 13:45:50.911406",
    "rc": 0,
    "start": "2025-01-06 13:45:50.882924"
}
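
The prune step runs podman image prune --all -f: --all removes every image not referenced by a container (not just dangling layers), and -f skips the confirmation prompt. Expressed as a standalone task it is roughly (the changed_when handling here is illustrative, not necessarily the role's):

    - name: Prune images no longer in use (illustrative)
      ansible.builtin.command: podman image prune --all -f
      register: prune_result
      changed_when: prune_result.stdout | length > 0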

TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131
Monday 06 January 2025  13:45:50 -0500 (0:00:00.399)       0:03:21.677 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Monday 06 January 2025  13:45:51 -0500 (0:00:00.120)       0:03:21.798 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Monday 06 January 2025  13:45:51 -0500 (0:00:00.034)       0:03:21.833 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Monday 06 January 2025  13:45:51 -0500 (0:00:00.034)       0:03:21.868 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}
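
The three linger tasks above are skipped because this quadlet runs as root. For rootless users the role relies on systemd lingering so the user's units keep running without an open session; enabling it boils down to loginctl, roughly ("someuser" is a placeholder):

    - name: Enable lingering for a rootless podman user (illustrative)
      ansible.builtin.command: loginctl enable-linger someuser
      args:
        creates: /var/lib/systemd/linger/someuser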

TASK [fedora.linux_system_roles.podman : For testing and debugging - images] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141
Monday 06 January 2025  13:45:51 -0500 (0:00:00.034)       0:03:21.902 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "podman",
        "images",
        "-n"
    ],
    "delta": "0:00:00.030782",
    "end": "2025-01-06 13:45:51.540627",
    "rc": 0,
    "start": "2025-01-06 13:45:51.509845"
}

TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150
Monday 06 January 2025  13:45:51 -0500 (0:00:00.404)       0:03:22.306 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "podman",
        "volume",
        "ls",
        "-n"
    ],
    "delta": "0:00:00.028536",
    "end": "2025-01-06 13:45:51.939892",
    "rc": 0,
    "start": "2025-01-06 13:45:51.911356"
}

TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159
Monday 06 January 2025  13:45:52 -0500 (0:00:00.398)       0:03:22.705 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "podman",
        "ps",
        "--noheading"
    ],
    "delta": "0:00:00.028174",
    "end": "2025-01-06 13:45:52.342047",
    "rc": 0,
    "start": "2025-01-06 13:45:52.313873"
}

TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168
Monday 06 January 2025  13:45:52 -0500 (0:00:00.402)       0:03:23.107 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "podman",
        "network",
        "ls",
        "-n",
        "-q"
    ],
    "delta": "0:00:00.029832",
    "end": "2025-01-06 13:45:52.748359",
    "rc": 0,
    "start": "2025-01-06 13:45:52.718527"
}

STDOUT:

podman
systemd-quadlet-demo

TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177
Monday 06 January 2025  13:45:52 -0500 (0:00:00.406)       0:03:23.514 ******** 
ok: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187
Monday 06 January 2025  13:45:53 -0500 (0:00:00.405)       0:03:23.920 ******** 
ok: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : For testing and debugging - services] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
Monday 06 January 2025  13:45:53 -0500 (0:00:00.406)       0:03:24.326 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "services": {
            "NetworkManager-dispatcher.service": {
                "name": "NetworkManager-dispatcher.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "NetworkManager-wait-online.service": {
                "name": "NetworkManager-wait-online.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "NetworkManager.service": {
                "name": "NetworkManager.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "audit-rules.service": {
                "name": "audit-rules.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "auditd.service": {
                "name": "auditd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "auth-rpcgss-module.service": {
                "name": "auth-rpcgss-module.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "autofs.service": {
                "name": "autofs.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "autovt@.service": {
                "name": "autovt@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "alias"
            },
            "blk-availability.service": {
                "name": "blk-availability.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "capsule@.service": {
                "name": "capsule@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "certmonger.service": {
                "name": "certmonger.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "chrony-wait.service": {
                "name": "chrony-wait.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "chronyd-restricted.service": {
                "name": "chronyd-restricted.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "chronyd.service": {
                "name": "chronyd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "cloud-config.service": {
                "name": "cloud-config.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-final.service": {
                "name": "cloud-final.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-init-hotplugd.service": {
                "name": "cloud-init-hotplugd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "cloud-init-local.service": {
                "name": "cloud-init-local.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-init.service": {
                "name": "cloud-init.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "console-getty.service": {
                "name": "console-getty.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "container-getty@.service": {
                "name": "container-getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "crond.service": {
                "name": "crond.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "dbus-broker.service": {
                "name": "dbus-broker.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "dbus-org.fedoraproject.FirewallD1.service": {
                "name": "dbus-org.fedoraproject.FirewallD1.service",
                "source": "systemd",
                "state": "active",
                "status": "alias"
            },
            "dbus-org.freedesktop.hostname1.service": {
                "name": "dbus-org.freedesktop.hostname1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus-org.freedesktop.locale1.service": {
                "name": "dbus-org.freedesktop.locale1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus-org.freedesktop.login1.service": {
                "name": "dbus-org.freedesktop.login1.service",
                "source": "systemd",
                "state": "active",
                "status": "alias"
            },
            "dbus-org.freedesktop.nm-dispatcher.service": {
                "name": "dbus-org.freedesktop.nm-dispatcher.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus-org.freedesktop.timedate1.service": {
                "name": "dbus-org.freedesktop.timedate1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus.service": {
                "name": "dbus.service",
                "source": "systemd",
                "state": "active",
                "status": "alias"
            },
            "debug-shell.service": {
                "name": "debug-shell.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "dhcpcd.service": {
                "name": "dhcpcd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "dhcpcd@.service": {
                "name": "dhcpcd@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "disabled"
            },
            "display-manager.service": {
                "name": "display-manager.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "dm-event.service": {
                "name": "dm-event.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dnf-makecache.service": {
                "name": "dnf-makecache.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dnf-system-upgrade-cleanup.service": {
                "name": "dnf-system-upgrade-cleanup.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "dnf-system-upgrade.service": {
                "name": "dnf-system-upgrade.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "dracut-cmdline.service": {
                "name": "dracut-cmdline.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-initqueue.service": {
                "name": "dracut-initqueue.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-mount.service": {
                "name": "dracut-mount.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-mount.service": {
                "name": "dracut-pre-mount.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-pivot.service": {
                "name": "dracut-pre-pivot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-trigger.service": {
                "name": "dracut-pre-trigger.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-udev.service": {
                "name": "dracut-pre-udev.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-shutdown-onfailure.service": {
                "name": "dracut-shutdown-onfailure.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-shutdown.service": {
                "name": "dracut-shutdown.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "ebtables.service": {
                "name": "ebtables.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "emergency.service": {
                "name": "emergency.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "fips-crypto-policy-overlay.service": {
                "name": "fips-crypto-policy-overlay.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "firewalld.service": {
                "name": "firewalld.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "fsidd.service": {
                "name": "fsidd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "fstrim.service": {
                "name": "fstrim.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "getty@.service": {
                "name": "getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "enabled"
            },
            "getty@tty1.service": {
                "name": "getty@tty1.service",
                "source": "systemd",
                "state": "running",
                "status": "active"
            },
            "grub-boot-indeterminate.service": {
                "name": "grub-boot-indeterminate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "grub2-systemd-integration.service": {
                "name": "grub2-systemd-integration.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "gssproxy.service": {
                "name": "gssproxy.service",
                "source": "systemd",
                "state": "running",
                "status": "disabled"
            },
            "hv_kvp_daemon.service": {
                "name": "hv_kvp_daemon.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "initrd-cleanup.service": {
                "name": "initrd-cleanup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-parse-etc.service": {
                "name": "initrd-parse-etc.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-switch-root.service": {
                "name": "initrd-switch-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-udevadm-cleanup-db.service": {
                "name": "initrd-udevadm-cleanup-db.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "ip6tables.service": {
                "name": "ip6tables.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "ipset.service": {
                "name": "ipset.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "iptables.service": {
                "name": "iptables.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "irqbalance.service": {
                "name": "irqbalance.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "kdump.service": {
                "name": "kdump.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "kmod-static-nodes.service": {
                "name": "kmod-static-nodes.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "kvm_stat.service": {
                "name": "kvm_stat.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "ldconfig.service": {
                "name": "ldconfig.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "logrotate.service": {
                "name": "logrotate.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "lvm-devices-import.service": {
                "name": "lvm-devices-import.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "lvm2-lvmpolld.service": {
                "name": "lvm2-lvmpolld.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "lvm2-monitor.service": {
                "name": "lvm2-monitor.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "man-db-cache-update.service": {
                "name": "man-db-cache-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "man-db-restart-cache-update.service": {
                "name": "man-db-restart-cache-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "microcode.service": {
                "name": "microcode.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "modprobe@.service": {
                "name": "modprobe@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "modprobe@configfs.service": {
                "name": "modprobe@configfs.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@dm_mod.service": {
                "name": "modprobe@dm_mod.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@drm.service": {
                "name": "modprobe@drm.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@efi_pstore.service": {
                "name": "modprobe@efi_pstore.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@fuse.service": {
                "name": "modprobe@fuse.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@loop.service": {
                "name": "modprobe@loop.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "netavark-dhcp-proxy.service": {
                "name": "netavark-dhcp-proxy.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "netavark-firewalld-reload.service": {
                "name": "netavark-firewalld-reload.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "network.service": {
                "name": "network.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "nfs-blkmap.service": {
                "name": "nfs-blkmap.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nfs-idmapd.service": {
                "name": "nfs-idmapd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfs-mountd.service": {
                "name": "nfs-mountd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfs-server.service": {
                "name": "nfs-server.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "nfs-utils.service": {
                "name": "nfs-utils.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfsdcld.service": {
                "name": "nfsdcld.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nftables.service": {
                "name": "nftables.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nis-domainname.service": {
                "name": "nis-domainname.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nm-priv-helper.service": {
                "name": "nm-priv-helper.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "ntpd.service": {
                "name": "ntpd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "ntpdate.service": {
                "name": "ntpdate.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "pam_namespace.service": {
                "name": "pam_namespace.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "pcscd.service": {
                "name": "pcscd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "indirect"
            },
            "plymouth-quit-wait.service": {
                "name": "plymouth-quit-wait.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "plymouth-start.service": {
                "name": "plymouth-start.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "podman-auto-update.service": {
                "name": "podman-auto-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "podman-clean-transient.service": {
                "name": "podman-clean-transient.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "podman-kube@.service": {
                "name": "podman-kube@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "disabled"
            },
            "podman-restart.service": {
                "name": "podman-restart.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "podman.service": {
                "name": "podman.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "polkit.service": {
                "name": "polkit.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "qemu-guest-agent.service": {
                "name": "qemu-guest-agent.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "quadlet-demo-network.service": {
                "name": "quadlet-demo-network.service",
                "source": "systemd",
                "state": "stopped",
                "status": "generated"
            },
            "quotaon-root.service": {
                "name": "quotaon-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "quotaon@.service": {
                "name": "quotaon@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "rc-local.service": {
                "name": "rc-local.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rescue.service": {
                "name": "rescue.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "restraintd.service": {
                "name": "restraintd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rngd.service": {
                "name": "rngd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rpc-gssd.service": {
                "name": "rpc-gssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-statd-notify.service": {
                "name": "rpc-statd-notify.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-statd.service": {
                "name": "rpc-statd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-svcgssd.service": {
                "name": "rpc-svcgssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "rpcbind.service": {
                "name": "rpcbind.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rpmdb-migrate.service": {
                "name": "rpmdb-migrate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "rpmdb-rebuild.service": {
                "name": "rpmdb-rebuild.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "rsyslog.service": {
                "name": "rsyslog.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "selinux-autorelabel-mark.service": {
                "name": "selinux-autorelabel-mark.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "selinux-autorelabel.service": {
                "name": "selinux-autorelabel.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "selinux-check-proper-disable.service": {
                "name": "selinux-check-proper-disable.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "serial-getty@.service": {
                "name": "serial-getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "indirect"
            },
            "serial-getty@ttyS0.service": {
                "name": "serial-getty@ttyS0.service",
                "source": "systemd",
                "state": "running",
                "status": "active"
            },
            "sntp.service": {
                "name": "sntp.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "ssh-host-keys-migration.service": {
                "name": "ssh-host-keys-migration.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "sshd-keygen.service": {
                "name": "sshd-keygen.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "sshd-keygen@.service": {
                "name": "sshd-keygen@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "disabled"
            },
            "sshd-keygen@ecdsa.service": {
                "name": "sshd-keygen@ecdsa.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "sshd-keygen@ed25519.service": {
                "name": "sshd-keygen@ed25519.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "sshd-keygen@rsa.service": {
                "name": "sshd-keygen@rsa.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "sshd.service": {
                "name": "sshd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "sshd@.service": {
                "name": "sshd@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "sssd-autofs.service": {
                "name": "sssd-autofs.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-kcm.service": {
                "name": "sssd-kcm.service",
                "source": "systemd",
                "state": "stopped",
                "status": "indirect"
            },
            "sssd-nss.service": {
                "name": "sssd-nss.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-pac.service": {
                "name": "sssd-pac.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-pam.service": {
                "name": "sssd-pam.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-ssh.service": {
                "name": "sssd-ssh.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-sudo.service": {
                "name": "sssd-sudo.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd.service": {
                "name": "sssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "syslog.service": {
                "name": "syslog.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "system-update-cleanup.service": {
                "name": "system-update-cleanup.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-ask-password-console.service": {
                "name": "systemd-ask-password-console.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-ask-password-wall.service": {
                "name": "systemd-ask-password-wall.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-backlight@.service": {
                "name": "systemd-backlight@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-battery-check.service": {
                "name": "systemd-battery-check.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-binfmt.service": {
                "name": "systemd-binfmt.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-bless-boot.service": {
                "name": "systemd-bless-boot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-boot-check-no-failures.service": {
                "name": "systemd-boot-check-no-failures.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-boot-random-seed.service": {
                "name": "systemd-boot-random-seed.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-boot-update.service": {
                "name": "systemd-boot-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-bootctl@.service": {
                "name": "systemd-bootctl@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-confext.service": {
                "name": "systemd-confext.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-coredump@.service": {
                "name": "systemd-coredump@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-creds@.service": {
                "name": "systemd-creds@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-exit.service": {
                "name": "systemd-exit.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-firstboot.service": {
                "name": "systemd-firstboot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-fsck-root.service": {
                "name": "systemd-fsck-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-fsck@.service": {
                "name": "systemd-fsck@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-growfs-root.service": {
                "name": "systemd-growfs-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-growfs@.service": {
                "name": "systemd-growfs@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-halt.service": {
                "name": "systemd-halt.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-hibernate-clear.service": {
                "name": "systemd-hibernate-clear.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hibernate-resume.service": {
                "name": "systemd-hibernate-resume.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hibernate.service": {
                "name": "systemd-hibernate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-hostnamed.service": {
                "name": "systemd-hostnamed.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hwdb-update.service": {
                "name": "systemd-hwdb-update.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hybrid-sleep.service": {
                "name": "systemd-hybrid-sleep.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-initctl.service": {
                "name": "systemd-initctl.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journal-catalog-update.service": {
                "name": "systemd-journal-catalog-update.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journal-flush.service": {
                "name": "systemd-journal-flush.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journald-sync@.service": {
                "name": "systemd-journald-sync@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-journald.service": {
                "name": "systemd-journald.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-journald@.service": {
                "name": "systemd-journald@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-kexec.service": {
                "name": "systemd-kexec.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-localed.service": {
                "name": "systemd-localed.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-logind.service": {
                "name": "systemd-logind.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-machine-id-commit.service": {
                "name": "systemd-machine-id-commit.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-modules-load.service": {
                "name": "systemd-modules-load.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-network-generator.service": {
                "name": "systemd-network-generator.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-networkd-wait-online.service": {
                "name": "systemd-networkd-wait-online.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "systemd-oomd.service": {
                "name": "systemd-oomd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "systemd-pcrextend@.service": {
                "name": "systemd-pcrextend@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-pcrfs-root.service": {
                "name": "systemd-pcrfs-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-pcrfs@.service": {
                "name": "systemd-pcrfs@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-pcrlock-file-system.service": {
                "name": "systemd-pcrlock-file-system.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-firmware-code.service": {
                "name": "systemd-pcrlock-firmware-code.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-firmware-config.service": {
                "name": "systemd-pcrlock-firmware-config.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-machine-id.service": {
                "name": "systemd-pcrlock-machine-id.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-make-policy.service": {
                "name": "systemd-pcrlock-make-policy.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-secureboot-authority.service": {
                "name": "systemd-pcrlock-secureboot-authority.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-secureboot-policy.service": {
                "name": "systemd-pcrlock-secureboot-policy.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock@.service": {
                "name": "systemd-pcrlock@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-pcrmachine.service": {
                "name": "systemd-pcrmachine.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-pcrphase-initrd.service": {
                "name": "systemd-pcrphase-initrd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-pcrphase-sysinit.service": {
                "name": "systemd-pcrphase-sysinit.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-pcrphase.service": {
                "name": "systemd-pcrphase.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-poweroff.service": {
                "name": "systemd-poweroff.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-pstore.service": {
                "name": "systemd-pstore.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-quotacheck-root.service": {
                "name": "systemd-quotacheck-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-quotacheck@.service": {
                "name": "systemd-quotacheck@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-random-seed.service": {
                "name": "systemd-random-seed.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-reboot.service": {
                "name": "systemd-reboot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-remount-fs.service": {
                "name": "systemd-remount-fs.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled-runtime"
            },
            "systemd-repart.service": {
                "name": "systemd-repart.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-rfkill.service": {
                "name": "systemd-rfkill.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-soft-reboot.service": {
                "name": "systemd-soft-reboot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-suspend-then-hibernate.service": {
                "name": "systemd-suspend-then-hibernate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-suspend.service": {
                "name": "systemd-suspend.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-sysctl.service": {
                "name": "systemd-sysctl.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-sysext.service": {
                "name": "systemd-sysext.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-sysext@.service": {
                "name": "systemd-sysext@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-sysupdate-reboot.service": {
                "name": "systemd-sysupdate-reboot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "systemd-sysupdate.service": {
                "name": "systemd-sysupdate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "systemd-sysusers.service": {
                "name": "systemd-sysusers.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-timedated.service": {
                "name": "systemd-timedated.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-timesyncd.service": {
                "name": "systemd-timesyncd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "systemd-tmpfiles-clean.service": {
                "name": "systemd-tmpfiles-clean.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup-dev-early.service": {
                "name": "systemd-tmpfiles-setup-dev-early.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup-dev.service": {
                "name": "systemd-tmpfiles-setup-dev.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup.service": {
                "name": "systemd-tmpfiles-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tpm2-setup-early.service": {
                "name": "systemd-tpm2-setup-early.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tpm2-setup.service": {
                "name": "systemd-tpm2-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udev-load-credentials.service": {
                "name": "systemd-udev-load-credentials.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "systemd-udev-settle.service": {
                "name": "systemd-udev-settle.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udev-trigger.service": {
                "name": "systemd-udev-trigger.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udevd.service": {
                "name": "systemd-udevd.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-update-done.service": {
                "name": "systemd-update-done.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-update-utmp-runlevel.service": {
                "name": "systemd-update-utmp-runlevel.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-update-utmp.service": {
                "name": "systemd-update-utmp.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-user-sessions.service": {
                "name": "systemd-user-sessions.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-vconsole-setup.service": {
                "name": "systemd-vconsole-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-volatile-root.service": {
                "name": "systemd-volatile-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "user-runtime-dir@.service": {
                "name": "user-runtime-dir@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "user-runtime-dir@0.service": {
                "name": "user-runtime-dir@0.service",
                "source": "systemd",
                "state": "stopped",
                "status": "active"
            },
            "user@.service": {
                "name": "user@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "user@0.service": {
                "name": "user@0.service",
                "source": "systemd",
                "state": "running",
                "status": "active"
            },
            "ypbind.service": {
                "name": "ypbind.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            }
        }
    },
    "changed": false
}
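
Note: the service inventory above is the form of output returned by ansible.builtin.service_facts. The relevant entry for this run is quadlet-demo-network.service, listed with status "generated", which is why the cleanup tasks below stop and disable it before removing its quadlet source. A minimal sketch of gathering and inspecting such facts (the module names are standard; the lookup shown is illustrative, not taken from the role):

    - name: Gather the systemd service inventory
      ansible.builtin.service_facts:

    - name: Inspect the generated quadlet service entry
      ansible.builtin.debug:
        var: ansible_facts.services['quadlet-demo-network.service']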

TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
Monday 06 January 2025  13:45:55 -0500 (0:00:01.845)       0:03:26.172 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_state != \"absent\"",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Monday 06 January 2025  13:45:55 -0500 (0:00:00.035)       0:03:26.207 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_file_src": "quadlet-demo.network",
        "__podman_quadlet_spec": {},
        "__podman_quadlet_str": "[Network]\nSubnet=192.168.30.0/24\nGateway=192.168.30.1\nLabel=app=wordpress",
        "__podman_quadlet_template_src": ""
    },
    "changed": false
}
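
Note: __podman_quadlet_str above is the raw source of the quadlet being handled. Unescaped, it corresponds to this quadlet-demo.network unit (reconstructed directly from the fact value shown above):

    [Network]
    Subnet=192.168.30.0/24
    Gateway=192.168.30.1
    Label=app=wordpress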

TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Monday 06 January 2025  13:45:55 -0500 (0:00:00.047)       0:03:26.254 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_continue_if_pull_fails": false,
        "__podman_pull_image": true,
        "__podman_state": "absent",
        "__podman_systemd_unit_scope": "",
        "__podman_user": "root"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Monday 06 January 2025  13:45:55 -0500 (0:00:00.043)       0:03:26.298 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_quadlet_file_src",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Monday 06 January 2025  13:45:55 -0500 (0:00:00.036)       0:03:26.334 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_name": "quadlet-demo",
        "__podman_quadlet_type": "network",
        "__podman_rootless": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Monday 06 January 2025  13:45:55 -0500 (0:00:00.105)       0:03:26.440 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 06 January 2025  13:45:55 -0500 (0:00:00.066)       0:03:26.507 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 06 January 2025  13:45:55 -0500 (0:00:00.038)       0:03:26.546 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Debug] ********************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 06 January 2025  13:45:55 -0500 (0:00:00.040)       0:03:26.586 ******** 
ok: [managed-node3] => {}

MSG:

item {
    "file_src": "quadlet-demo.network",
    "state": "absent"
}
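
Note: the item printed above is the quadlet spec currently being processed. A minimal sketch of how such a spec could be passed to the role through its podman_quadlet_specs variable (the variable name is the role's documented entry point; the surrounding play wording is an assumption, not the test playbook itself):

    - hosts: all
      vars:
        podman_quadlet_specs:
          - file_src: quadlet-demo.network   # quadlet source shipped with the play
            state: absent                    # remove the quadlet and its generated service
      roles:
        - fedora.linux_system_roles.podman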

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:20
Monday 06 January 2025  13:45:55 -0500 (0:00:00.039)       0:03:26.626 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_group": "0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35
Monday 06 January 2025  13:45:55 -0500 (0:00:00.050)       0:03:26.676 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1736188868.5129364,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 32,
        "charset": "binary",
        "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97",
        "ctime": 1736188848.6497798,
        "dev": 51714,
        "device_type": 0,
        "executable": true,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 9125782,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "application/x-pie-executable",
        "mode": "0755",
        "mtime": 1730678400.0,
        "nlink": 1,
        "path": "/usr/bin/getsubids",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 15744,
        "uid": 0,
        "version": "4070602005",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": true,
        "xoth": true,
        "xusr": true
    }
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:46
Monday 06 January 2025  13:45:56 -0500 (0:00:00.376)       0:03:27.053 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:51
Monday 06 January 2025  13:45:56 -0500 (0:00:00.035)       0:03:27.089 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:56
Monday 06 January 2025  13:45:56 -0500 (0:00:00.037)       0:03:27.126 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:69
Monday 06 January 2025  13:45:56 -0500 (0:00:00.037)       0:03:27.164 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74
Monday 06 January 2025  13:45:56 -0500 (0:00:00.036)       0:03:27.201 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79
Monday 06 January 2025  13:45:56 -0500 (0:00:00.037)       0:03:27.238 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Monday 06 January 2025  13:45:56 -0500 (0:00:00.037)       0:03:27.276 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:96
Monday 06 January 2025  13:45:56 -0500 (0:00:00.035)       0:03:27.311 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Monday 06 January 2025  13:45:56 -0500 (0:00:00.036)       0:03:27.347 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_activate_systemd_unit": true,
        "__podman_images_found": [],
        "__podman_kube_yamls_raw": "",
        "__podman_service_name": "quadlet-demo-network.service",
        "__podman_systemd_scope": "system",
        "__podman_user_home_dir": "/root",
        "__podman_xdg_runtime_dir": "/run/user/0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Monday 06 January 2025  13:45:56 -0500 (0:00:00.111)       0:03:27.459 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_path": "/etc/containers/systemd"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Monday 06 January 2025  13:45:56 -0500 (0:00:00.038)       0:03:27.497 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_state != \"absent\"",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Monday 06 January 2025  13:45:56 -0500 (0:00:00.035)       0:03:27.532 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_images": [],
        "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.network",
        "__podman_volumes": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Monday 06 January 2025  13:45:56 -0500 (0:00:00.079)       0:03:27.611 ******** 
ok: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Monday 06 January 2025  13:45:56 -0500 (0:00:00.041)       0:03:27.653 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4
Monday 06 January 2025  13:45:57 -0500 (0:00:00.081)       0:03:27.735 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Stop and disable service] *************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
Monday 06 January 2025  13:45:57 -0500 (0:00:00.048)       0:03:27.783 ******** 
changed: [managed-node3] => {
    "changed": true,
    "enabled": false,
    "failed_when_result": false,
    "name": "quadlet-demo-network.service",
    "state": "stopped",
    "status": {
        "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0",
        "ActiveEnterTimestamp": "Mon 2025-01-06 13:43:08 EST",
        "ActiveEnterTimestampMonotonic": "429294672",
        "ActiveExitTimestampMonotonic": "0",
        "ActiveState": "active",
        "After": "systemd-journald.socket sysinit.target -.mount network-online.target basic.target system.slice",
        "AllowIsolate": "no",
        "AssertResult": "yes",
        "AssertTimestamp": "Mon 2025-01-06 13:43:08 EST",
        "AssertTimestampMonotonic": "429252638",
        "Before": "shutdown.target",
        "BlockIOAccounting": "no",
        "BlockIOWeight": "[not set]",
        "CPUAccounting": "yes",
        "CPUAffinityFromNUMA": "no",
        "CPUQuotaPerSecUSec": "infinity",
        "CPUQuotaPeriodUSec": "infinity",
        "CPUSchedulingPolicy": "0",
        "CPUSchedulingPriority": "0",
        "CPUSchedulingResetOnFork": "no",
        "CPUShares": "[not set]",
        "CPUUsageNSec": "[not set]",
        "CPUWeight": "[not set]",
        "CacheDirectoryMode": "0755",
        "CanFreeze": "yes",
        "CanIsolate": "no",
        "CanReload": "no",
        "CanStart": "yes",
        "CanStop": "yes",
        "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
        "CleanResult": "success",
        "CollectMode": "inactive",
        "ConditionResult": "yes",
        "ConditionTimestamp": "Mon 2025-01-06 13:43:08 EST",
        "ConditionTimestampMonotonic": "429252635",
        "ConfigurationDirectoryMode": "0755",
        "Conflicts": "shutdown.target",
        "ControlGroupId": "0",
        "ControlPID": "0",
        "CoredumpFilter": "0x33",
        "CoredumpReceive": "no",
        "DefaultDependencies": "yes",
        "DefaultMemoryLow": "0",
        "DefaultMemoryMin": "0",
        "DefaultStartupMemoryLow": "0",
        "Delegate": "no",
        "Description": "quadlet-demo-network.service",
        "DevicePolicy": "auto",
        "DynamicUser": "no",
        "EffectiveMemoryHigh": "3698229248",
        "EffectiveMemoryMax": "3698229248",
        "EffectiveTasksMax": "22349",
        "ExecMainCode": "1",
        "ExecMainExitTimestamp": "Mon 2025-01-06 13:43:08 EST",
        "ExecMainExitTimestampMonotonic": "429294509",
        "ExecMainHandoffTimestamp": "Mon 2025-01-06 13:43:08 EST",
        "ExecMainHandoffTimestampMonotonic": "429263462",
        "ExecMainPID": "29445",
        "ExecMainStartTimestamp": "Mon 2025-01-06 13:43:08 EST",
        "ExecMainStartTimestampMonotonic": "429253446",
        "ExecMainStatus": "0",
        "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.30.0/24 --gateway 192.168.30.1 --label app=wordpress systemd-quadlet-demo ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.30.0/24 --gateway 192.168.30.1 --label app=wordpress systemd-quadlet-demo ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExitType": "main",
        "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "FailureAction": "none",
        "FileDescriptorStoreMax": "0",
        "FileDescriptorStorePreserve": "restart",
        "FinalKillSignal": "9",
        "FragmentPath": "/run/systemd/generator/quadlet-demo-network.service",
        "FreezerState": "running",
        "GID": "[not set]",
        "GuessMainPID": "yes",
        "IOAccounting": "no",
        "IOReadBytes": "[not set]",
        "IOReadOperations": "[not set]",
        "IOSchedulingClass": "2",
        "IOSchedulingPriority": "4",
        "IOWeight": "[not set]",
        "IOWriteBytes": "[not set]",
        "IOWriteOperations": "[not set]",
        "IPAccounting": "no",
        "IPEgressBytes": "[no data]",
        "IPEgressPackets": "[no data]",
        "IPIngressBytes": "[no data]",
        "IPIngressPackets": "[no data]",
        "Id": "quadlet-demo-network.service",
        "IgnoreOnIsolate": "no",
        "IgnoreSIGPIPE": "yes",
        "InactiveEnterTimestampMonotonic": "0",
        "InactiveExitTimestamp": "Mon 2025-01-06 13:43:08 EST",
        "InactiveExitTimestampMonotonic": "429253857",
        "InvocationID": "30398d836e074582b7befad847c38792",
        "JobRunningTimeoutUSec": "infinity",
        "JobTimeoutAction": "none",
        "JobTimeoutUSec": "infinity",
        "KeyringMode": "private",
        "KillMode": "control-group",
        "KillSignal": "15",
        "LimitAS": "infinity",
        "LimitASSoft": "infinity",
        "LimitCORE": "infinity",
        "LimitCORESoft": "infinity",
        "LimitCPU": "infinity",
        "LimitCPUSoft": "infinity",
        "LimitDATA": "infinity",
        "LimitDATASoft": "infinity",
        "LimitFSIZE": "infinity",
        "LimitFSIZESoft": "infinity",
        "LimitLOCKS": "infinity",
        "LimitLOCKSSoft": "infinity",
        "LimitMEMLOCK": "8388608",
        "LimitMEMLOCKSoft": "8388608",
        "LimitMSGQUEUE": "819200",
        "LimitMSGQUEUESoft": "819200",
        "LimitNICE": "0",
        "LimitNICESoft": "0",
        "LimitNOFILE": "524288",
        "LimitNOFILESoft": "1024",
        "LimitNPROC": "13968",
        "LimitNPROCSoft": "13968",
        "LimitRSS": "infinity",
        "LimitRSSSoft": "infinity",
        "LimitRTPRIO": "0",
        "LimitRTPRIOSoft": "0",
        "LimitRTTIME": "infinity",
        "LimitRTTIMESoft": "infinity",
        "LimitSIGPENDING": "13968",
        "LimitSIGPENDINGSoft": "13968",
        "LimitSTACK": "infinity",
        "LimitSTACKSoft": "8388608",
        "LoadState": "loaded",
        "LockPersonality": "no",
        "LogLevelMax": "-1",
        "LogRateLimitBurst": "0",
        "LogRateLimitIntervalUSec": "0",
        "LogsDirectoryMode": "0755",
        "MainPID": "0",
        "ManagedOOMMemoryPressure": "auto",
        "ManagedOOMMemoryPressureLimit": "0",
        "ManagedOOMPreference": "none",
        "ManagedOOMSwap": "auto",
        "MemoryAccounting": "yes",
        "MemoryAvailable": "3182465024",
        "MemoryCurrent": "[not set]",
        "MemoryDenyWriteExecute": "no",
        "MemoryHigh": "infinity",
        "MemoryKSM": "no",
        "MemoryLimit": "infinity",
        "MemoryLow": "0",
        "MemoryMax": "infinity",
        "MemoryMin": "0",
        "MemoryPeak": "[not set]",
        "MemoryPressureThresholdUSec": "200ms",
        "MemoryPressureWatch": "auto",
        "MemorySwapCurrent": "[not set]",
        "MemorySwapMax": "infinity",
        "MemorySwapPeak": "[not set]",
        "MemoryZSwapCurrent": "[not set]",
        "MemoryZSwapMax": "infinity",
        "MemoryZSwapWriteback": "yes",
        "MountAPIVFS": "no",
        "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "NFileDescriptorStore": "0",
        "NRestarts": "0",
        "NUMAPolicy": "n/a",
        "Names": "quadlet-demo-network.service",
        "NeedDaemonReload": "no",
        "Nice": "0",
        "NoNewPrivileges": "no",
        "NonBlocking": "no",
        "NotifyAccess": "none",
        "OOMPolicy": "stop",
        "OOMScoreAdjust": "0",
        "OnFailureJobMode": "replace",
        "OnSuccessJobMode": "fail",
        "Perpetual": "no",
        "PrivateDevices": "no",
        "PrivateIPC": "no",
        "PrivateMounts": "no",
        "PrivateNetwork": "no",
        "PrivateTmp": "no",
        "PrivateUsers": "no",
        "ProcSubset": "all",
        "ProtectClock": "no",
        "ProtectControlGroups": "no",
        "ProtectHome": "no",
        "ProtectHostname": "no",
        "ProtectKernelLogs": "no",
        "ProtectKernelModules": "no",
        "ProtectKernelTunables": "no",
        "ProtectProc": "default",
        "ProtectSystem": "no",
        "RefuseManualStart": "no",
        "RefuseManualStop": "no",
        "ReloadResult": "success",
        "ReloadSignal": "1",
        "RemainAfterExit": "yes",
        "RemoveIPC": "no",
        "Requires": "system.slice sysinit.target -.mount",
        "RequiresMountsFor": "/run/containers",
        "Restart": "no",
        "RestartKillSignal": "15",
        "RestartMaxDelayUSec": "infinity",
        "RestartMode": "normal",
        "RestartSteps": "0",
        "RestartUSec": "100ms",
        "RestartUSecNext": "100ms",
        "RestrictNamespaces": "no",
        "RestrictRealtime": "no",
        "RestrictSUIDSGID": "no",
        "Result": "success",
        "RootDirectoryStartOnly": "no",
        "RootEphemeral": "no",
        "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "RuntimeDirectoryMode": "0755",
        "RuntimeDirectoryPreserve": "no",
        "RuntimeMaxUSec": "infinity",
        "RuntimeRandomizedExtraUSec": "0",
        "SameProcessGroup": "no",
        "SecureBits": "0",
        "SendSIGHUP": "no",
        "SendSIGKILL": "yes",
        "SetLoginEnvironment": "no",
        "Slice": "system.slice",
        "SourcePath": "/etc/containers/systemd/quadlet-demo.network",
        "StandardError": "inherit",
        "StandardInput": "null",
        "StandardOutput": "journal",
        "StartLimitAction": "none",
        "StartLimitBurst": "5",
        "StartLimitIntervalUSec": "10s",
        "StartupBlockIOWeight": "[not set]",
        "StartupCPUShares": "[not set]",
        "StartupCPUWeight": "[not set]",
        "StartupIOWeight": "[not set]",
        "StartupMemoryHigh": "infinity",
        "StartupMemoryLow": "0",
        "StartupMemoryMax": "infinity",
        "StartupMemorySwapMax": "infinity",
        "StartupMemoryZSwapMax": "infinity",
        "StateChangeTimestamp": "Mon 2025-01-06 13:43:08 EST",
        "StateChangeTimestampMonotonic": "429294672",
        "StateDirectoryMode": "0755",
        "StatusErrno": "0",
        "StopWhenUnneeded": "no",
        "SubState": "exited",
        "SuccessAction": "none",
        "SurviveFinalKillSignal": "no",
        "SyslogFacility": "3",
        "SyslogIdentifier": "quadlet-demo-network",
        "SyslogLevel": "6",
        "SyslogLevelPrefix": "yes",
        "SyslogPriority": "30",
        "SystemCallErrorNumber": "2147483646",
        "TTYReset": "no",
        "TTYVHangup": "no",
        "TTYVTDisallocate": "no",
        "TasksAccounting": "yes",
        "TasksCurrent": "[not set]",
        "TasksMax": "22349",
        "TimeoutAbortUSec": "1min 30s",
        "TimeoutCleanUSec": "infinity",
        "TimeoutStartFailureMode": "terminate",
        "TimeoutStartUSec": "infinity",
        "TimeoutStopFailureMode": "terminate",
        "TimeoutStopUSec": "1min 30s",
        "TimerSlackNSec": "50000",
        "Transient": "no",
        "Type": "oneshot",
        "UID": "[not set]",
        "UMask": "0022",
        "UnitFilePreset": "disabled",
        "UnitFileState": "generated",
        "UtmpMode": "init",
        "Wants": "network-online.target",
        "WatchdogSignal": "6",
        "WatchdogTimestampMonotonic": "0",
        "WatchdogUSec": "0"
    }
}
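
Note: the large property dump above is what ansible.builtin.systemd returns when it stops a unit. The operation performed here is roughly equivalent to the following task (a sketch of the effect only; the role's actual task guards failures with a more specific failed_when condition, as the failed_when_result field above indicates):

    - name: Stop and disable the generated quadlet service
      ansible.builtin.systemd:
        name: quadlet-demo-network.service
        state: stopped
        enabled: false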

TASK [fedora.linux_system_roles.podman : See if quadlet file exists] ***********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33
Monday 06 January 2025  13:45:57 -0500 (0:00:00.764)       0:03:28.547 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1736188986.6390493,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "e57c08d49aff4bae8daab138d913aeddaa8682a0",
        "ctime": 1736188986.6440494,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 163578054,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1736188986.2310452,
        "nlink": 1,
        "path": "/etc/containers/systemd/quadlet-demo.network",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 74,
        "uid": 0,
        "version": "2881722664",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38
Monday 06 January 2025  13:45:58 -0500 (0:00:00.379)       0:03:28.926 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Slurp quadlet file] *******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6
Monday 06 January 2025  13:45:58 -0500 (0:00:00.062)       0:03:28.988 ******** 
ok: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12
Monday 06 January 2025  13:45:58 -0500 (0:00:00.363)       0:03:29.351 ******** 
ok: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44
Monday 06 January 2025  13:45:58 -0500 (0:00:00.052)       0:03:29.404 ******** 
skipping: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Reset raw variable] *******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52
Monday 06 January 2025  13:45:58 -0500 (0:00:00.036)       0:03:29.440 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_raw": null
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Remove quadlet file] ******************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42
Monday 06 January 2025  13:45:58 -0500 (0:00:00.035)       0:03:29.476 ******** 
changed: [managed-node3] => {
    "changed": true,
    "path": "/etc/containers/systemd/quadlet-demo.network",
    "state": "absent"
}

TASK [fedora.linux_system_roles.podman : Refresh systemd] **********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48
Monday 06 January 2025  13:45:59 -0500 (0:00:00.392)       0:03:29.869 ******** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}
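
This "Refresh systemd" step returns no unit name and an empty status, which is consistent with a daemon-reload-only call made after the quadlet file was removed; the rough manual equivalent on the node is simply:

    systemctl daemon-reload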

TASK [fedora.linux_system_roles.podman : Remove managed resource] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58
Monday 06 January 2025  13:45:59 -0500 (0:00:00.774)       0:03:30.644 ******** 
changed: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": true
}
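
The output of this step is hidden by no_log, but given that the quadlet-demo.network file has just been removed and the later network listing shows only the default "podman" network, this is the point where the underlying podman network object gets removed. A rough manual equivalent, assuming the network kept quadlet's default systemd- prefixed name (i.e. no NetworkName= override in the unit, which this log does not show), would be:

    podman network rm systemd-quadlet-demo
    podman network ls -n -q    # afterwards only the default "podman" network should remain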

TASK [fedora.linux_system_roles.podman : Remove volumes] ***********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99
Monday 06 January 2025  13:46:00 -0500 (0:00:00.441)       0:03:31.085 ******** 
skipping: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] *********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116
Monday 06 January 2025  13:46:00 -0500 (0:00:00.078)       0:03:31.164 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_parsed": null
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120
Monday 06 January 2025  13:46:00 -0500 (0:00:00.064)       0:03:31.229 ******** 
changed: [managed-node3] => {
    "changed": true,
    "cmd": [
        "podman",
        "image",
        "prune",
        "--all",
        "-f"
    ],
    "delta": "0:00:00.030501",
    "end": "2025-01-06 13:46:00.881102",
    "rc": 0,
    "start": "2025-01-06 13:46:00.850601"
}

TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131
Monday 06 January 2025  13:46:00 -0500 (0:00:00.422)       0:03:31.652 ******** 
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Monday 06 January 2025  13:46:01 -0500 (0:00:00.072)       0:03:31.724 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Monday 06 January 2025  13:46:01 -0500 (0:00:00.035)       0:03:31.760 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Monday 06 January 2025  13:46:01 -0500 (0:00:00.039)       0:03:31.799 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : For testing and debugging - images] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141
Monday 06 January 2025  13:46:01 -0500 (0:00:00.051)       0:03:31.851 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "podman",
        "images",
        "-n"
    ],
    "delta": "0:00:00.031757",
    "end": "2025-01-06 13:46:01.545839",
    "rc": 0,
    "start": "2025-01-06 13:46:01.514082"
}

TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150
Monday 06 January 2025  13:46:01 -0500 (0:00:00.459)       0:03:32.310 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "podman",
        "volume",
        "ls",
        "-n"
    ],
    "delta": "0:00:00.030027",
    "end": "2025-01-06 13:46:01.947811",
    "rc": 0,
    "start": "2025-01-06 13:46:01.917784"
}

TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159
Monday 06 January 2025  13:46:02 -0500 (0:00:00.410)       0:03:32.721 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "podman",
        "ps",
        "--noheading"
    ],
    "delta": "0:00:00.028499",
    "end": "2025-01-06 13:46:02.367275",
    "rc": 0,
    "start": "2025-01-06 13:46:02.338776"
}

TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168
Monday 06 January 2025  13:46:02 -0500 (0:00:00.415)       0:03:33.136 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "podman",
        "network",
        "ls",
        "-n",
        "-q"
    ],
    "delta": "0:00:00.028499",
    "end": "2025-01-06 13:46:02.797892",
    "rc": 0,
    "start": "2025-01-06 13:46:02.769393"
}

STDOUT:

podman

TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177
Monday 06 January 2025  13:46:02 -0500 (0:00:00.426)       0:03:33.562 ******** 
ok: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187
Monday 06 January 2025  13:46:03 -0500 (0:00:00.420)       0:03:33.983 ******** 
ok: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : For testing and debugging - services] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
Monday 06 January 2025  13:46:03 -0500 (0:00:00.446)       0:03:34.429 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "services": {
            "NetworkManager-dispatcher.service": {
                "name": "NetworkManager-dispatcher.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "NetworkManager-wait-online.service": {
                "name": "NetworkManager-wait-online.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "NetworkManager.service": {
                "name": "NetworkManager.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "audit-rules.service": {
                "name": "audit-rules.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "auditd.service": {
                "name": "auditd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "auth-rpcgss-module.service": {
                "name": "auth-rpcgss-module.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "autofs.service": {
                "name": "autofs.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "autovt@.service": {
                "name": "autovt@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "alias"
            },
            "blk-availability.service": {
                "name": "blk-availability.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "capsule@.service": {
                "name": "capsule@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "certmonger.service": {
                "name": "certmonger.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "chrony-wait.service": {
                "name": "chrony-wait.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "chronyd-restricted.service": {
                "name": "chronyd-restricted.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "chronyd.service": {
                "name": "chronyd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "cloud-config.service": {
                "name": "cloud-config.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-final.service": {
                "name": "cloud-final.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-init-hotplugd.service": {
                "name": "cloud-init-hotplugd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "cloud-init-local.service": {
                "name": "cloud-init-local.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-init.service": {
                "name": "cloud-init.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "console-getty.service": {
                "name": "console-getty.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "container-getty@.service": {
                "name": "container-getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "crond.service": {
                "name": "crond.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "dbus-broker.service": {
                "name": "dbus-broker.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "dbus-org.fedoraproject.FirewallD1.service": {
                "name": "dbus-org.fedoraproject.FirewallD1.service",
                "source": "systemd",
                "state": "active",
                "status": "alias"
            },
            "dbus-org.freedesktop.hostname1.service": {
                "name": "dbus-org.freedesktop.hostname1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus-org.freedesktop.locale1.service": {
                "name": "dbus-org.freedesktop.locale1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus-org.freedesktop.login1.service": {
                "name": "dbus-org.freedesktop.login1.service",
                "source": "systemd",
                "state": "active",
                "status": "alias"
            },
            "dbus-org.freedesktop.nm-dispatcher.service": {
                "name": "dbus-org.freedesktop.nm-dispatcher.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus-org.freedesktop.timedate1.service": {
                "name": "dbus-org.freedesktop.timedate1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus.service": {
                "name": "dbus.service",
                "source": "systemd",
                "state": "active",
                "status": "alias"
            },
            "debug-shell.service": {
                "name": "debug-shell.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "dhcpcd.service": {
                "name": "dhcpcd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "dhcpcd@.service": {
                "name": "dhcpcd@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "disabled"
            },
            "display-manager.service": {
                "name": "display-manager.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "dm-event.service": {
                "name": "dm-event.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dnf-makecache.service": {
                "name": "dnf-makecache.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dnf-system-upgrade-cleanup.service": {
                "name": "dnf-system-upgrade-cleanup.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "dnf-system-upgrade.service": {
                "name": "dnf-system-upgrade.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "dracut-cmdline.service": {
                "name": "dracut-cmdline.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-initqueue.service": {
                "name": "dracut-initqueue.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-mount.service": {
                "name": "dracut-mount.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-mount.service": {
                "name": "dracut-pre-mount.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-pivot.service": {
                "name": "dracut-pre-pivot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-trigger.service": {
                "name": "dracut-pre-trigger.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-udev.service": {
                "name": "dracut-pre-udev.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-shutdown-onfailure.service": {
                "name": "dracut-shutdown-onfailure.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-shutdown.service": {
                "name": "dracut-shutdown.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "ebtables.service": {
                "name": "ebtables.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "emergency.service": {
                "name": "emergency.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "fips-crypto-policy-overlay.service": {
                "name": "fips-crypto-policy-overlay.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "firewalld.service": {
                "name": "firewalld.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "fsidd.service": {
                "name": "fsidd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "fstrim.service": {
                "name": "fstrim.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "getty@.service": {
                "name": "getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "enabled"
            },
            "getty@tty1.service": {
                "name": "getty@tty1.service",
                "source": "systemd",
                "state": "running",
                "status": "active"
            },
            "grub-boot-indeterminate.service": {
                "name": "grub-boot-indeterminate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "grub2-systemd-integration.service": {
                "name": "grub2-systemd-integration.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "gssproxy.service": {
                "name": "gssproxy.service",
                "source": "systemd",
                "state": "running",
                "status": "disabled"
            },
            "hv_kvp_daemon.service": {
                "name": "hv_kvp_daemon.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "initrd-cleanup.service": {
                "name": "initrd-cleanup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-parse-etc.service": {
                "name": "initrd-parse-etc.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-switch-root.service": {
                "name": "initrd-switch-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-udevadm-cleanup-db.service": {
                "name": "initrd-udevadm-cleanup-db.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "ip6tables.service": {
                "name": "ip6tables.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "ipset.service": {
                "name": "ipset.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "iptables.service": {
                "name": "iptables.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "irqbalance.service": {
                "name": "irqbalance.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "kdump.service": {
                "name": "kdump.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "kmod-static-nodes.service": {
                "name": "kmod-static-nodes.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "kvm_stat.service": {
                "name": "kvm_stat.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "ldconfig.service": {
                "name": "ldconfig.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "logrotate.service": {
                "name": "logrotate.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "lvm-devices-import.service": {
                "name": "lvm-devices-import.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "lvm2-lvmpolld.service": {
                "name": "lvm2-lvmpolld.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "lvm2-monitor.service": {
                "name": "lvm2-monitor.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "man-db-cache-update.service": {
                "name": "man-db-cache-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "man-db-restart-cache-update.service": {
                "name": "man-db-restart-cache-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "microcode.service": {
                "name": "microcode.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "modprobe@.service": {
                "name": "modprobe@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "modprobe@configfs.service": {
                "name": "modprobe@configfs.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@dm_mod.service": {
                "name": "modprobe@dm_mod.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@drm.service": {
                "name": "modprobe@drm.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@efi_pstore.service": {
                "name": "modprobe@efi_pstore.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@fuse.service": {
                "name": "modprobe@fuse.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@loop.service": {
                "name": "modprobe@loop.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "netavark-dhcp-proxy.service": {
                "name": "netavark-dhcp-proxy.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "netavark-firewalld-reload.service": {
                "name": "netavark-firewalld-reload.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "network.service": {
                "name": "network.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "nfs-blkmap.service": {
                "name": "nfs-blkmap.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nfs-idmapd.service": {
                "name": "nfs-idmapd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfs-mountd.service": {
                "name": "nfs-mountd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfs-server.service": {
                "name": "nfs-server.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "nfs-utils.service": {
                "name": "nfs-utils.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfsdcld.service": {
                "name": "nfsdcld.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nftables.service": {
                "name": "nftables.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nis-domainname.service": {
                "name": "nis-domainname.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nm-priv-helper.service": {
                "name": "nm-priv-helper.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "ntpd.service": {
                "name": "ntpd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "ntpdate.service": {
                "name": "ntpdate.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "pam_namespace.service": {
                "name": "pam_namespace.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "pcscd.service": {
                "name": "pcscd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "indirect"
            },
            "plymouth-quit-wait.service": {
                "name": "plymouth-quit-wait.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "plymouth-start.service": {
                "name": "plymouth-start.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "podman-auto-update.service": {
                "name": "podman-auto-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "podman-clean-transient.service": {
                "name": "podman-clean-transient.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "podman-kube@.service": {
                "name": "podman-kube@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "disabled"
            },
            "podman-restart.service": {
                "name": "podman-restart.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "podman.service": {
                "name": "podman.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "polkit.service": {
                "name": "polkit.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "qemu-guest-agent.service": {
                "name": "qemu-guest-agent.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "quotaon-root.service": {
                "name": "quotaon-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "quotaon@.service": {
                "name": "quotaon@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "rc-local.service": {
                "name": "rc-local.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rescue.service": {
                "name": "rescue.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "restraintd.service": {
                "name": "restraintd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rngd.service": {
                "name": "rngd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rpc-gssd.service": {
                "name": "rpc-gssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-statd-notify.service": {
                "name": "rpc-statd-notify.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-statd.service": {
                "name": "rpc-statd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-svcgssd.service": {
                "name": "rpc-svcgssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "rpcbind.service": {
                "name": "rpcbind.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rpmdb-migrate.service": {
                "name": "rpmdb-migrate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "rpmdb-rebuild.service": {
                "name": "rpmdb-rebuild.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "rsyslog.service": {
                "name": "rsyslog.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "selinux-autorelabel-mark.service": {
                "name": "selinux-autorelabel-mark.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "selinux-autorelabel.service": {
                "name": "selinux-autorelabel.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "selinux-check-proper-disable.service": {
                "name": "selinux-check-proper-disable.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "serial-getty@.service": {
                "name": "serial-getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "indirect"
            },
            "serial-getty@ttyS0.service": {
                "name": "serial-getty@ttyS0.service",
                "source": "systemd",
                "state": "running",
                "status": "active"
            },
            "sntp.service": {
                "name": "sntp.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "ssh-host-keys-migration.service": {
                "name": "ssh-host-keys-migration.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "sshd-keygen.service": {
                "name": "sshd-keygen.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "sshd-keygen@.service": {
                "name": "sshd-keygen@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "disabled"
            },
            "sshd-keygen@ecdsa.service": {
                "name": "sshd-keygen@ecdsa.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "sshd-keygen@ed25519.service": {
                "name": "sshd-keygen@ed25519.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "sshd-keygen@rsa.service": {
                "name": "sshd-keygen@rsa.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "sshd.service": {
                "name": "sshd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "sshd@.service": {
                "name": "sshd@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "sssd-autofs.service": {
                "name": "sssd-autofs.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-kcm.service": {
                "name": "sssd-kcm.service",
                "source": "systemd",
                "state": "stopped",
                "status": "indirect"
            },
            "sssd-nss.service": {
                "name": "sssd-nss.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-pac.service": {
                "name": "sssd-pac.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-pam.service": {
                "name": "sssd-pam.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-ssh.service": {
                "name": "sssd-ssh.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-sudo.service": {
                "name": "sssd-sudo.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd.service": {
                "name": "sssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "syslog.service": {
                "name": "syslog.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "system-update-cleanup.service": {
                "name": "system-update-cleanup.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-ask-password-console.service": {
                "name": "systemd-ask-password-console.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-ask-password-wall.service": {
                "name": "systemd-ask-password-wall.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-backlight@.service": {
                "name": "systemd-backlight@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-battery-check.service": {
                "name": "systemd-battery-check.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-binfmt.service": {
                "name": "systemd-binfmt.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-bless-boot.service": {
                "name": "systemd-bless-boot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-boot-check-no-failures.service": {
                "name": "systemd-boot-check-no-failures.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-boot-random-seed.service": {
                "name": "systemd-boot-random-seed.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-boot-update.service": {
                "name": "systemd-boot-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-bootctl@.service": {
                "name": "systemd-bootctl@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-confext.service": {
                "name": "systemd-confext.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-coredump@.service": {
                "name": "systemd-coredump@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-creds@.service": {
                "name": "systemd-creds@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-exit.service": {
                "name": "systemd-exit.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-firstboot.service": {
                "name": "systemd-firstboot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-fsck-root.service": {
                "name": "systemd-fsck-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-fsck@.service": {
                "name": "systemd-fsck@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-growfs-root.service": {
                "name": "systemd-growfs-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-growfs@.service": {
                "name": "systemd-growfs@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-halt.service": {
                "name": "systemd-halt.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-hibernate-clear.service": {
                "name": "systemd-hibernate-clear.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hibernate-resume.service": {
                "name": "systemd-hibernate-resume.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hibernate.service": {
                "name": "systemd-hibernate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-hostnamed.service": {
                "name": "systemd-hostnamed.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hwdb-update.service": {
                "name": "systemd-hwdb-update.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hybrid-sleep.service": {
                "name": "systemd-hybrid-sleep.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-initctl.service": {
                "name": "systemd-initctl.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journal-catalog-update.service": {
                "name": "systemd-journal-catalog-update.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journal-flush.service": {
                "name": "systemd-journal-flush.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journald-sync@.service": {
                "name": "systemd-journald-sync@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-journald.service": {
                "name": "systemd-journald.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-journald@.service": {
                "name": "systemd-journald@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-kexec.service": {
                "name": "systemd-kexec.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-localed.service": {
                "name": "systemd-localed.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-logind.service": {
                "name": "systemd-logind.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-machine-id-commit.service": {
                "name": "systemd-machine-id-commit.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-modules-load.service": {
                "name": "systemd-modules-load.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-network-generator.service": {
                "name": "systemd-network-generator.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-networkd-wait-online.service": {
                "name": "systemd-networkd-wait-online.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "systemd-oomd.service": {
                "name": "systemd-oomd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "systemd-pcrextend@.service": {
                "name": "systemd-pcrextend@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-pcrfs-root.service": {
                "name": "systemd-pcrfs-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-pcrfs@.service": {
                "name": "systemd-pcrfs@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-pcrlock-file-system.service": {
                "name": "systemd-pcrlock-file-system.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-firmware-code.service": {
                "name": "systemd-pcrlock-firmware-code.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-firmware-config.service": {
                "name": "systemd-pcrlock-firmware-config.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-machine-id.service": {
                "name": "systemd-pcrlock-machine-id.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-make-policy.service": {
                "name": "systemd-pcrlock-make-policy.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-secureboot-authority.service": {
                "name": "systemd-pcrlock-secureboot-authority.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-secureboot-policy.service": {
                "name": "systemd-pcrlock-secureboot-policy.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock@.service": {
                "name": "systemd-pcrlock@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-pcrmachine.service": {
                "name": "systemd-pcrmachine.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-pcrphase-initrd.service": {
                "name": "systemd-pcrphase-initrd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-pcrphase-sysinit.service": {
                "name": "systemd-pcrphase-sysinit.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-pcrphase.service": {
                "name": "systemd-pcrphase.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-poweroff.service": {
                "name": "systemd-poweroff.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-pstore.service": {
                "name": "systemd-pstore.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-quotacheck-root.service": {
                "name": "systemd-quotacheck-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-quotacheck@.service": {
                "name": "systemd-quotacheck@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-random-seed.service": {
                "name": "systemd-random-seed.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-reboot.service": {
                "name": "systemd-reboot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-remount-fs.service": {
                "name": "systemd-remount-fs.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled-runtime"
            },
            "systemd-repart.service": {
                "name": "systemd-repart.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-rfkill.service": {
                "name": "systemd-rfkill.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-soft-reboot.service": {
                "name": "systemd-soft-reboot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-suspend-then-hibernate.service": {
                "name": "systemd-suspend-then-hibernate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-suspend.service": {
                "name": "systemd-suspend.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-sysctl.service": {
                "name": "systemd-sysctl.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-sysext.service": {
                "name": "systemd-sysext.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-sysext@.service": {
                "name": "systemd-sysext@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-sysupdate-reboot.service": {
                "name": "systemd-sysupdate-reboot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "systemd-sysupdate.service": {
                "name": "systemd-sysupdate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "systemd-sysusers.service": {
                "name": "systemd-sysusers.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-timedated.service": {
                "name": "systemd-timedated.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-timesyncd.service": {
                "name": "systemd-timesyncd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "systemd-tmpfiles-clean.service": {
                "name": "systemd-tmpfiles-clean.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup-dev-early.service": {
                "name": "systemd-tmpfiles-setup-dev-early.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup-dev.service": {
                "name": "systemd-tmpfiles-setup-dev.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup.service": {
                "name": "systemd-tmpfiles-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tpm2-setup-early.service": {
                "name": "systemd-tpm2-setup-early.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tpm2-setup.service": {
                "name": "systemd-tpm2-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udev-load-credentials.service": {
                "name": "systemd-udev-load-credentials.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "systemd-udev-settle.service": {
                "name": "systemd-udev-settle.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udev-trigger.service": {
                "name": "systemd-udev-trigger.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udevd.service": {
                "name": "systemd-udevd.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-update-done.service": {
                "name": "systemd-update-done.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-update-utmp-runlevel.service": {
                "name": "systemd-update-utmp-runlevel.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-update-utmp.service": {
                "name": "systemd-update-utmp.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-user-sessions.service": {
                "name": "systemd-user-sessions.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-vconsole-setup.service": {
                "name": "systemd-vconsole-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-volatile-root.service": {
                "name": "systemd-volatile-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "user-runtime-dir@.service": {
                "name": "user-runtime-dir@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "user-runtime-dir@0.service": {
                "name": "user-runtime-dir@0.service",
                "source": "systemd",
                "state": "stopped",
                "status": "active"
            },
            "user@.service": {
                "name": "user@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "user@0.service": {
                "name": "user@0.service",
                "source": "systemd",
                "state": "running",
                "status": "active"
            },
            "ypbind.service": {
                "name": "ypbind.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            }
        }
    },
    "changed": false
}
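
Note: the service map above has the shape returned by the ansible.builtin.service_facts
module -- one entry per unit with "name", "source", "state", and "status". A minimal
sketch of collecting such a dump, assuming a generic localhost target rather than this
test's managed node, is:

    # sketch: gather a per-service map like the dump above
    - hosts: localhost
      become: true          # full service state usually needs privileges
      gather_facts: false
      tasks:
        - name: Collect service facts
          ansible.builtin.service_facts:

        - name: Show the collected map of services
          ansible.builtin.debug:
            var: ansible_facts.services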

TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
Monday 06 January 2025  13:46:05 -0500 (0:00:01.894)       0:03:36.324 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_state != \"absent\"",
    "skip_reason": "Conditional result was False"
}
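
Note: the skip above is the normal output of a task-level "when:" guard;
ansible-playbook reports the task as "skipping" and records the failing expression
under "false_condition". A minimal sketch of a task that would skip the same way,
using a hypothetical variable name rather than the role's real one, is:

    # sketch only -- not the role's actual task file
    - hosts: localhost
      gather_facts: false
      vars:
        __example_state: absent    # hypothetical variable for illustration
      tasks:
        - name: Create and update quadlets (sketch)
          ansible.builtin.debug:
            msg: "would write quadlet files here"
          when: __example_state != "absent"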

TASK [fedora.linux_system_roles.podman : Cancel linger] ************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196
Monday 06 January 2025  13:46:05 -0500 (0:00:00.034)       0:03:36.359 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.podman : Handle credential files - absent] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202
Monday 06 January 2025  13:46:05 -0500 (0:00:00.031)       0:03:36.390 ******** 
skipping: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211
Monday 06 January 2025  13:46:05 -0500 (0:00:00.032)       0:03:36.423 ******** 
skipping: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [Ensure no resources] *****************************************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:188
Monday 06 January 2025  13:46:05 -0500 (0:00:00.050)       0:03:36.474 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed
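
Note: "All assertions passed" is the default success message printed by
ansible.builtin.assert when no success_msg is supplied. A minimal sketch of such a
check, with a hypothetical variable standing in for the test's real resource list, is:

    - hosts: localhost
      gather_facts: false
      tasks:
        - name: Ensure no resources (sketch)
          ansible.builtin.assert:
            that:
              - leftover_resources | default([]) | length == 0   # hypothetical list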

PLAY RECAP *********************************************************************
managed-node3              : ok=429  changed=48   unreachable=0    failed=1    skipped=442  rescued=1    ignored=0   


TASKS RECAP ********************************************************************
Monday 06 January 2025  13:46:05 -0500 (0:00:00.036)       0:03:36.510 ******** 
=============================================================================== 
Check web -------------------------------------------------------------- 33.08s
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:121 
fedora.linux_system_roles.podman : Ensure container images are present -- 17.87s
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 
fedora.linux_system_roles.podman : Ensure container images are present --- 7.13s
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 
fedora.linux_system_roles.certificate : Ensure provider packages are installed --- 2.90s
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:23 
fedora.linux_system_roles.certificate : Ensure certificate role dependencies are installed --- 2.69s
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:5 
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.34s
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 
fedora.linux_system_roles.podman : Stop and disable service ------------- 2.13s
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 
fedora.linux_system_roles.podman : For testing and debugging - services --- 1.96s
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 
fedora.linux_system_roles.podman : For testing and debugging - services --- 1.93s
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 
fedora.linux_system_roles.podman : For testing and debugging - services --- 1.89s
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 
fedora.linux_system_roles.podman : For testing and debugging - services --- 1.86s
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 
fedora.linux_system_roles.podman : For testing and debugging - services --- 1.85s
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 
Gathering Facts --------------------------------------------------------- 1.55s
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:9 
fedora.linux_system_roles.podman : Start service ------------------------ 1.54s
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 
fedora.linux_system_roles.firewall : Configure firewall ----------------- 1.41s
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71 
fedora.linux_system_roles.podman : Stop and disable service ------------- 1.40s
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 
fedora.linux_system_roles.podman : Remove volumes ----------------------- 1.31s
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 
fedora.linux_system_roles.certificate : Remove files -------------------- 1.30s
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:181 
fedora.linux_system_roles.certificate : Slurp the contents of the files --- 1.26s
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:152 
fedora.linux_system_roles.podman : Prune images no longer in use -------- 1.19s
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120
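
Note: the per-task timing list above is the kind of summary produced by the
profile_tasks callback plugin, which by default prints the slowest tasks of the run.
Assuming a standard setup, it can be enabled with an ansible.cfg entry such as:

    # ansible.cfg -- sketch; enables per-task timing output like the recap above
    [defaults]
    callbacks_enabled = ansible.posix.profile_tasks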