ansible-playbook [core 2.17.7]
  config file = None
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/local/lib/python3.12/site-packages/ansible
  ansible collection location = /tmp/collections-PRc
  executable location = /usr/local/bin/ansible-playbook
  python version = 3.12.8 (main, Dec 3 2024, 00:00:00) [GCC 14.2.1 20241104 (Red Hat 14.2.1-6)] (/usr/bin/python3.12)
  jinja version = 3.1.4
  libyaml = True
No config file found; using defaults
running playbook inside collection fedora.linux_system_roles
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.profile_tasks to ansible.posix.profile_tasks
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: tests_quadlet_demo.yml ***********************************************
2 plays in /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml

PLAY [all] *********************************************************************

TASK [Include vault variables] *************************************************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:5
Saturday 18 January 2025 11:30:27 -0500 (0:00:00.008) 0:00:00.008 ******
ok: [managed-node2] => { "ansible_facts": { "__podman_test_password": { "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n35383939616163653333633431363463313831383037386236646138333162396161356130303461\n3932623930643263313563336163316337643562333936360a363538636631313039343233383732\n38666530383538656639363465313230343533386130303833336434303438333161656262346562\n3362626538613031640a663330613638366132356534363534353239616666653466353961323533\n6565\n" }, "mysql_container_root_password": { "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n61333932373230333539663035366431326163363166363036323963623131363530326231303634\n6635326161643165363366323062333334363730376631660a393566366139353861656364656661\n38653463363837336639363032646433666361646535366137303464623261313663643336306465\n6264663730656337310a343962353137386238383064646533366433333437303566656433386233\n34343235326665646661623131643335313236313131353661386338343366316261643634653633\n3832313034366536616531323963333234326461353130303532\n" } }, "ansible_included_var_files": [ "/tmp/podman-JDA/tests/vars/vault-variables.yml" ], "changed": false }
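[NOTE]: The task above is a plain include_vars of an ansible-vault encrypted
file; a minimal sketch of the pattern (the file path mirrors the
ansible_included_var_files value above, everything else is illustrative):

    - name: Include vault variables
      ansible.builtin.include_vars:
        file: "{{ playbook_dir }}/vars/vault-variables.yml"

The "__ansible_vault" payloads shown in the result stay encrypted in the fact
store and are only decrypted when a task actually dereferences the variables.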
PLAY [Deploy the quadlet demo app] *********************************************

TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:9
Saturday 18 January 2025 11:30:27 -0500 (0:00:00.024) 0:00:00.033 ******
[WARNING]: Platform linux on host managed-node2 is using the discovered Python
interpreter at /usr/bin/python3.12, but future installation of another Python
interpreter could change the meaning of that path. See
https://docs.ansible.com/ansible-core/2.17/reference_appendices/interpreter_discovery.html
for more information.
ok: [managed-node2]

TASK [Test is only supported on x86_64] ****************************************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:38
Saturday 18 January 2025 11:30:29 -0500 (0:00:01.211) 0:00:01.244 ******
skipping: [managed-node2] => { "false_condition": "ansible_facts[\"architecture\"] != \"x86_64\"" }

TASK [End test] ****************************************************************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:45
Saturday 18 January 2025 11:30:29 -0500 (0:00:00.015) 0:00:01.260 ******
META: end_play conditional evaluated to False, continuing play
skipping: [managed-node2] => { "skip_reason": "end_play conditional evaluated to False, continuing play" }
MSG: end_play

TASK [Generate certificates] ***************************************************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:51
Saturday 18 January 2025 11:30:29 -0500 (0:00:00.008) 0:00:01.268 ******
included: fedora.linux_system_roles.certificate for managed-node2

TASK [fedora.linux_system_roles.certificate : Set version specific variables] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:2
Saturday 18 January 2025 11:30:29 -0500 (0:00:00.037) 0:00:01.305 ******
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml for managed-node2

TASK [fedora.linux_system_roles.certificate : Ensure ansible_facts used by role] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:2
Saturday 18 January 2025 11:30:29 -0500 (0:00:00.022) 0:00:01.327 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "__certificate_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.certificate : Check if system is ostree] *******
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:10
Saturday 18 January 2025 11:30:29 -0500 (0:00:00.034) 0:00:01.361 ******
ok: [managed-node2] => { "changed": false, "stat": { "exists": false } }

TASK [fedora.linux_system_roles.certificate : Set flag to indicate system is ostree] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:15
Saturday 18 January 2025 11:30:29 -0500 (0:00:00.478) 0:00:01.840 ******
ok: [managed-node2] => { "ansible_facts": { "__certificate_is_ostree": false }, "changed": false }

TASK [fedora.linux_system_roles.certificate : Set platform/version specific variables] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:19
Saturday 18 January 2025 11:30:29 -0500 (0:00:00.034) 0:00:01.874 ******
skipping: [managed-node2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" }
skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" }
ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__certificate_certmonger_packages": [ "certmonger", "python3-packaging" ] }, "ansible_included_var_files": [ "/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/certificate/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" }
ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__certificate_certmonger_packages": [ "certmonger", "python3-packaging" ] }, "ansible_included_var_files": [ "/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/certificate/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" }

TASK [fedora.linux_system_roles.certificate : Ensure certificate role dependencies are installed] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:5
Saturday 18 January 2025 11:30:29 -0500 (0:00:00.066) 0:00:01.941 ******
changed: [managed-node2] => { "changed": true, "rc": 0, "results": [ "Installed: python3-cffi-1.16.0-7.el10.x86_64", "Installed: python3-ply-3.11-25.el10.noarch", "Installed: python3-pycparser-2.20-16.el10.noarch", "Installed: python3-cryptography-43.0.0-4.el10.x86_64", "Installed: python3-pyasn1-0.6.1-1.el10.noarch" ] }
lsrpackages: python3-cryptography python3-dbus python3-pyasn1

TASK [fedora.linux_system_roles.certificate : Ensure provider packages are installed] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:23
Saturday 18 January 2025 11:30:32 -0500 (0:00:02.210) 0:00:04.152 ******
changed: [managed-node2] => (item=certmonger) => { "__certificate_provider": "certmonger", "ansible_loop_var": "__certificate_provider", "changed": true, "rc": 0, "results": [ "Installed: python3-packaging-23.2-6.el10.noarch", "Installed: nss-softokn-freebl-3.101.0-13.el10.x86_64", "Installed: nss-sysinit-3.101.0-13.el10.x86_64", "Installed: dbus-tools-1:1.14.10-5.el10.x86_64", "Installed: nspr-4.35.0-34.el10.x86_64", "Installed: nss-3.101.0-13.el10.x86_64", "Installed: nss-util-3.101.0-13.el10.x86_64", "Installed: certmonger-0.79.20-3.el10.x86_64", "Installed: nss-softokn-3.101.0-13.el10.x86_64" ] }
lsrpackages: certmonger python3-packaging

TASK [fedora.linux_system_roles.certificate : Ensure pre-scripts hooks directory exists] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:35
Saturday 18 January 2025 11:30:35 -0500 (0:00:03.144) 0:00:07.297 ******
changed: [managed-node2] => (item=certmonger) => { "__certificate_provider": "certmonger", "ansible_loop_var": "__certificate_provider", "changed": true, "gid": 0, "group": "root", "mode": "0700", "owner": "root", "path": "/etc/certmonger//pre-scripts", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 }

TASK [fedora.linux_system_roles.certificate : Ensure post-scripts hooks directory exists] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:61
Saturday 18 January 2025 11:30:35 -0500 (0:00:00.581) 0:00:07.879 ******
changed: [managed-node2] => (item=certmonger) => { "__certificate_provider": "certmonger", "ansible_loop_var": "__certificate_provider", "changed": true, "gid": 0, "group": "root", "mode": "0700", "owner": "root", "path": "/etc/certmonger//post-scripts", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 }

TASK [fedora.linux_system_roles.certificate : Ensure
provider service is running] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:90 Saturday 18 January 2025 11:30:36 -0500 (0:00:00.406) 0:00:08.285 ****** changed: [managed-node2] => (item=certmonger) => { "__certificate_provider": "certmonger", "ansible_loop_var": "__certificate_provider", "changed": true, "enabled": true, "name": "certmonger", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:certmonger_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "network.target system.slice dbus-broker.service basic.target syslog.target sysinit.target dbus.socket systemd-journald.socket", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedorahosted.certmonger", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "Certificate monitoring and PKI enrollment", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698241536", "EffectiveMemoryMax": "3698241536", "EffectiveTasksMax": "22365", "EnvironmentFiles": "/etc/sysconfig/certmonger (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/sbin/certmonger ; argv[]=/usr/sbin/certmonger -S -p /run/certmonger.pid -n $OPTS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/certmonger ; argv[]=/usr/sbin/certmonger -S -p /run/certmonger.pid -n $OPTS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": 
"root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/certmonger.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "certmonger.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3204444160", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "certmonger.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", 
"PIDFile": "/run/certmonger.pid", "PartOf": "dbus-broker.service", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice dbus.socket sysinit.target", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "disabled", "UtmpMode": "init", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.certificate : Ensure certificate requests] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:101 Saturday 18 January 2025 11:30:37 -0500 (0:00:01.105) 0:00:09.391 ****** changed: [managed-node2] => (item={'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}) => { "ansible_loop_var": "item", "changed": true, "item": { "ca": "self-sign", "dns": [ "localhost" ], "name": "quadlet_demo" } } MSG: Certificate 
requested (new). TASK [fedora.linux_system_roles.certificate : Slurp the contents of the files] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:152 Saturday 18 January 2025 11:30:38 -0500 (0:00:00.937) 0:00:10.328 ****** ok: [managed-node2] => (item=['cert', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => { "ansible_loop_var": "item", "changed": false, "content": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURnakNDQW1xZ0F3SUJBZ0lRQk10ZzZFUWlUVm1xL2FmRHZWcXg1REFOQmdrcWhraUc5dzBCQVFzRkFEQlEKTVNBd0hnWURWUVFEREJkTWIyTmhiQ0JUYVdkdWFXNW5JRUYxZEdodmNtbDBlVEVzTUNvR0ExVUVBd3dqTURSagpZall3WlRndE5EUXlNalJrTlRrdFlXRm1aR0UzWXpNdFltUTFZV0l4WlRNd0hoY05NalV3TVRFNE1UWXpNRE00CldoY05Nall3TVRFNE1UWXpNRE0zV2pBVU1SSXdFQVlEVlFRREV3bHNiMk5oYkdodmMzUXdnZ0VpTUEwR0NTcUcKU0liM0RRRUJBUVVBQTRJQkR3QXdnZ0VLQW9JQkFRQzVxejF6bzdiM1J4RFp3TFF6dzNDcjhwYkNoQm9QeHZlTgpvay9WU1BXY2VGQXZPY1BRYklobEZYTXQ1b3daUEFvSkhnWHZkQUp2WFZua0xUZ3NxY1NzaDhucWRyczN2YW0vCklnU1FRbVlSUXhwYTc3L3dpODJleS95WHl5aW95TVByNlMrTHR2TFlhazB3c2NrSnhUVWlqbkg3alBkLzUrSm0KRnZscGI5UlNHc2Q4SUtteU5NTUhNM1pyKyt2OUd0TFoyYXFOQ2t0N3dUZE92MmtCUmF6UGNOVWRyM25XWTcwUgpmMzROYzZNL1k3NDg5NjZhWlVtdWlIMXJFajEyMkUyZ0JOUWlWUnp1KzlvdWFRUDlPNGduQi8rQkpNK0JqTWFTCmRCaHBMdjhWeDY1TmUrZWxZVHhWanBtV0JXc1lvcGVoOCtRL3Z5NzlFK1FaT0R4L1A4dGZBZ01CQUFHamdaTXcKZ1pBd0N3WURWUjBQQkFRREFnV2dNQlFHQTFVZEVRUU5NQXVDQ1d4dlkyRnNhRzl6ZERBZEJnTlZIU1VFRmpBVQpCZ2dyQmdFRkJRY0RBUVlJS3dZQkJRVUhBd0l3REFZRFZSMFRBUUgvQkFJd0FEQWRCZ05WSFE0RUZnUVVHZU9RCk1TY3JZdGpFZ3VhNnpMQnFhcmxhaFZZd0h3WURWUjBqQkJnd0ZvQVVBVG9QZWNsWVRDM253M1BzSWN1akUydkYKTEJzd0RRWUpLb1pJaHZjTkFRRUxCUUFEZ2dFQkFCcHF0ZWFielN0a1VhVStmR1hDSkN5bmxyN3pXQ0V3TXJKcApVR1R5dGVhRXppVmwwNzllUnUrVFpwVHpVM1N0ZlJ0VnQzbmI2OENIcG9EdDBpUVpNaWQ3MDJnc1BuNFJKTlgvCmsyMnd5WDlFRkRjZzRhai9JYTBCS2FlUUFXbmR5d2ZDWSsrcVhqWEdNUmZJVGRUenB2RnQ1QzU0UWJHbURMYnUKVGoyNm1uWWM0SGMyYzdSL25XSnZLWUxFZFA2SGhKRTJMUzNydGRSU0xhMmJ3ZmdSTk9za011eVRUc0MreXR3MAprNjJaQkFraEtMR3NuUzV4V092dnZlQ2x2ekgvSGtBZ2FlZThTRWlRelQ1MVZuVGtPb1p6Y2hDR2NuUUFuUmx2CkszQVZ0WTVzTEtsQWR0V0ppNCtzTlo4S1JiZ0ltbzhoVkQ0Tysxa20yT1k0WGZNQVJBZz0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=", "encoding": "base64", "item": [ "cert", { "ca": "self-sign", "dns": [ "localhost" ], "name": "quadlet_demo" } ], "source": "/etc/pki/tls/certs/quadlet_demo.crt" } ok: [managed-node2] => (item=['key', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => { "ansible_loop_var": "item", "changed": false, "content": 
"LS0tLS1CRUdJTiBQUklWQVRFIEtFWS0tLS0tCk1JSUV2QUlCQURBTkJna3Foa2lHOXcwQkFRRUZBQVNDQktZd2dnU2lBZ0VBQW9JQkFRQzVxejF6bzdiM1J4RFoKd0xRenczQ3I4cGJDaEJvUHh2ZU5vay9WU1BXY2VGQXZPY1BRYklobEZYTXQ1b3daUEFvSkhnWHZkQUp2WFZuawpMVGdzcWNTc2g4bnFkcnMzdmFtL0lnU1FRbVlSUXhwYTc3L3dpODJleS95WHl5aW95TVByNlMrTHR2TFlhazB3CnNja0p4VFVpam5IN2pQZC81K0ptRnZscGI5UlNHc2Q4SUtteU5NTUhNM1pyKyt2OUd0TFoyYXFOQ2t0N3dUZE8KdjJrQlJhelBjTlVkcjNuV1k3MFJmMzROYzZNL1k3NDg5NjZhWlVtdWlIMXJFajEyMkUyZ0JOUWlWUnp1KzlvdQphUVA5TzRnbkIvK0JKTStCak1hU2RCaHBMdjhWeDY1TmUrZWxZVHhWanBtV0JXc1lvcGVoOCtRL3Z5NzlFK1FaCk9EeC9QOHRmQWdNQkFBRUNnZ0VBUXo4V3dFSHMvRTNFZUNHNEFzR0lGdmNEaklzZEIxYlloWGRpczh5N2pJTDEKQTgzSHNyOEVmWndSdmUreEVWaXlCOUJVZFFCTyszZlRuWFdWVVhQaGxGMlFEN2dXSWQrelVkTW4xUG4vanNQLwprUzBobHlXRGdUNi9pMWpkTHBpY2dWN1JSaWlxZHlDMU9XeEM5S04xRmNHQzNnbVdBTUVET2xlYTJRblVYREhSCmRNdWVtQVpjYU9LamloNmxVM3RQdE5KemxsTDdpZnp5WkM0NGR5RHlVbm5DVFRuR0w4QjlocGpCTzJPQ3Rma2IKZDZDdnJCWFNnK3VEOUxIeUpzYTM5SzdleUFFdTh6RDQrWVlKVnozVkppUzJOaUtxZHFkVmhOcFA1VnRHNDQ0UQpQYVJyUENOdlI3OW5IR3MwMm9WNXVnREc2bllQODZZQ0dsdlJFWjZ6b1FLQmdRRHFWNitzNEJPTUY4NnFYaHNxCmdaeDEvSkpXQnErWEl4bkRqd0RoVGFrSi9QQlJNMTlCRWlvNmN1U3pqQmtKci9qRWZQYWs3clZhcnVyeGJVRXEKcE10K2IvbmdjQ0J4VnBWSk9hVU9zd1RHR3VFd3hTRGhxWjhCbkhGLzI0ZExvVWdrYnU4NGEvbkhiQkVmNjI2YQp6WUs2MFd4WVRDWHE1a25RNjc5b1BDSDJZUUtCZ1FESzAvdmVRK0FEbkpjOVhXWlVYazdGaFhXZGhVa3N3RkNtClZjVFZGM0NDYUpkMzVUNWFFSzJielVDaWtReUpVQXYxcm5kQ3RHRk1hbzMxVm55OXd2ZndEUVVtWXlncVhVMzkKbXVOTkVuRkFoanE1U05ra3cxWTh0MnBWdTRuUUdpQUo5TUo3Vk1wL054cGNwcXBTSjNSa3NNdGFkTTNkb09YcAo4LzVvbEVtWnZ3S0JnQVIrcEE3enlVd3hsTFF0MCtXUDBObHVlN3dNcXNlQ1U5Y3RZZjN3U0dLNXFENVdkdWcvCm1mRy9ER1hwTlAyN2sxRHBlSzJYS29GUHc0L1g0WjBZeGhKcHdXZ3RubUd1SUhUa0Z5ZGVCYVJyRlpDaTUwdU0KdmxFQW1DaTBwRGF1SkQwV3A1MzBiRGtIK0pnZEl1VHpOaU94S3B2UzhoTThVWVFoVVcyRlliMWhBb0dBVUxPZgoxNFB5TWYyd1AzNVdKT09SUEE0cUNnN3FXcC82ZHZSUmFPZjJvQ3kxZDhNQzJCc2l1SWNtWElpU0VHQ0ZJYS9WClMva3hiWkpJQVlIMENCejNRZEFzYUdXenFZZU9iSVdnUXg0L2FZaisvSkg3dENEMHd4dkhkMjB6SEZZSXdXMWQKcEd6TVZXbTdWM29JOHI4NEZJZ3pvVDR3V3lxVjBFeUF3V09mRlVrQ2dZQTRzLzlhN2txbU04a25tdWNRNzlnbApBU0dQbmhsVFBUa1RRSDVIcjNrL2JKMmZadzFxRG51by9vVGRKaXhYeU55RFdYSFhpUTRVWHpyM21zbXI5eHZZCjNxbTVjRTFLajB3elI3UVBNVk1NaWFmczczaEVrM0NkamUvWnBUQldPaE5yV0lKak1TRWYwenFlWlQ5MUUxNVMKaTVJTHQ2VFFPVDNBN0l5YVdFOWRVdz09Ci0tLS0tRU5EIFBSSVZBVEUgS0VZLS0tLS0K", "encoding": "base64", "item": [ "key", { "ca": "self-sign", "dns": [ "localhost" ], "name": "quadlet_demo" } ], "source": "/etc/pki/tls/private/quadlet_demo.key" } ok: [managed-node2] => (item=['ca', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => { "ansible_loop_var": "item", "changed": false, "content": 
"LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURnakNDQW1xZ0F3SUJBZ0lRQk10ZzZFUWlUVm1xL2FmRHZWcXg1REFOQmdrcWhraUc5dzBCQVFzRkFEQlEKTVNBd0hnWURWUVFEREJkTWIyTmhiQ0JUYVdkdWFXNW5JRUYxZEdodmNtbDBlVEVzTUNvR0ExVUVBd3dqTURSagpZall3WlRndE5EUXlNalJrTlRrdFlXRm1aR0UzWXpNdFltUTFZV0l4WlRNd0hoY05NalV3TVRFNE1UWXpNRE00CldoY05Nall3TVRFNE1UWXpNRE0zV2pBVU1SSXdFQVlEVlFRREV3bHNiMk5oYkdodmMzUXdnZ0VpTUEwR0NTcUcKU0liM0RRRUJBUVVBQTRJQkR3QXdnZ0VLQW9JQkFRQzVxejF6bzdiM1J4RFp3TFF6dzNDcjhwYkNoQm9QeHZlTgpvay9WU1BXY2VGQXZPY1BRYklobEZYTXQ1b3daUEFvSkhnWHZkQUp2WFZua0xUZ3NxY1NzaDhucWRyczN2YW0vCklnU1FRbVlSUXhwYTc3L3dpODJleS95WHl5aW95TVByNlMrTHR2TFlhazB3c2NrSnhUVWlqbkg3alBkLzUrSm0KRnZscGI5UlNHc2Q4SUtteU5NTUhNM1pyKyt2OUd0TFoyYXFOQ2t0N3dUZE92MmtCUmF6UGNOVWRyM25XWTcwUgpmMzROYzZNL1k3NDg5NjZhWlVtdWlIMXJFajEyMkUyZ0JOUWlWUnp1KzlvdWFRUDlPNGduQi8rQkpNK0JqTWFTCmRCaHBMdjhWeDY1TmUrZWxZVHhWanBtV0JXc1lvcGVoOCtRL3Z5NzlFK1FaT0R4L1A4dGZBZ01CQUFHamdaTXcKZ1pBd0N3WURWUjBQQkFRREFnV2dNQlFHQTFVZEVRUU5NQXVDQ1d4dlkyRnNhRzl6ZERBZEJnTlZIU1VFRmpBVQpCZ2dyQmdFRkJRY0RBUVlJS3dZQkJRVUhBd0l3REFZRFZSMFRBUUgvQkFJd0FEQWRCZ05WSFE0RUZnUVVHZU9RCk1TY3JZdGpFZ3VhNnpMQnFhcmxhaFZZd0h3WURWUjBqQkJnd0ZvQVVBVG9QZWNsWVRDM253M1BzSWN1akUydkYKTEJzd0RRWUpLb1pJaHZjTkFRRUxCUUFEZ2dFQkFCcHF0ZWFielN0a1VhVStmR1hDSkN5bmxyN3pXQ0V3TXJKcApVR1R5dGVhRXppVmwwNzllUnUrVFpwVHpVM1N0ZlJ0VnQzbmI2OENIcG9EdDBpUVpNaWQ3MDJnc1BuNFJKTlgvCmsyMnd5WDlFRkRjZzRhai9JYTBCS2FlUUFXbmR5d2ZDWSsrcVhqWEdNUmZJVGRUenB2RnQ1QzU0UWJHbURMYnUKVGoyNm1uWWM0SGMyYzdSL25XSnZLWUxFZFA2SGhKRTJMUzNydGRSU0xhMmJ3ZmdSTk9za011eVRUc0MreXR3MAprNjJaQkFraEtMR3NuUzV4V092dnZlQ2x2ekgvSGtBZ2FlZThTRWlRelQ1MVZuVGtPb1p6Y2hDR2NuUUFuUmx2CkszQVZ0WTVzTEtsQWR0V0ppNCtzTlo4S1JiZ0ltbzhoVkQ0Tysxa20yT1k0WGZNQVJBZz0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=", "encoding": "base64", "item": [ "ca", { "ca": "self-sign", "dns": [ "localhost" ], "name": "quadlet_demo" } ], "source": "/etc/pki/tls/certs/quadlet_demo.crt" } TASK [fedora.linux_system_roles.certificate : Create return data] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:160 Saturday 18 January 2025 11:30:39 -0500 (0:00:01.166) 0:00:11.494 ****** ok: [managed-node2] => { "ansible_facts": { "certificate_test_certs": { "quadlet_demo": { "ca": "/etc/pki/tls/certs/quadlet_demo.crt", "ca_content": "-----BEGIN 
CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQBMtg6EQiTVmq/afDvVqx5DANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjMDRj\nYjYwZTgtNDQyMjRkNTktYWFmZGE3YzMtYmQ1YWIxZTMwHhcNMjUwMTE4MTYzMDM4\nWhcNMjYwMTE4MTYzMDM3WjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC5qz1zo7b3RxDZwLQzw3Cr8pbChBoPxveN\nok/VSPWceFAvOcPQbIhlFXMt5owZPAoJHgXvdAJvXVnkLTgsqcSsh8nqdrs3vam/\nIgSQQmYRQxpa77/wi82ey/yXyyioyMPr6S+LtvLYak0wsckJxTUijnH7jPd/5+Jm\nFvlpb9RSGsd8IKmyNMMHM3Zr++v9GtLZ2aqNCkt7wTdOv2kBRazPcNUdr3nWY70R\nf34Nc6M/Y748966aZUmuiH1rEj122E2gBNQiVRzu+9ouaQP9O4gnB/+BJM+BjMaS\ndBhpLv8Vx65Ne+elYTxVjpmWBWsYopeh8+Q/vy79E+QZODx/P8tfAgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQUGeOQ\nMScrYtjEgua6zLBqarlahVYwHwYDVR0jBBgwFoAUAToPeclYTC3nw3PsIcujE2vF\nLBswDQYJKoZIhvcNAQELBQADggEBABpqteabzStkUaU+fGXCJCynlr7zWCEwMrJp\nUGTyteaEziVl079eRu+TZpTzU3StfRtVt3nb68CHpoDt0iQZMid702gsPn4RJNX/\nk22wyX9EFDcg4aj/Ia0BKaeQAWndywfCY++qXjXGMRfITdTzpvFt5C54QbGmDLbu\nTj26mnYc4Hc2c7R/nWJvKYLEdP6HhJE2LS3rtdRSLa2bwfgRNOskMuyTTsC+ytw0\nk62ZBAkhKLGsnS5xWOvvveClvzH/HkAgaee8SEiQzT51VnTkOoZzchCGcnQAnRlv\nK3AVtY5sLKlAdtWJi4+sNZ8KRbgImo8hVD4O+1km2OY4XfMARAg=\n-----END CERTIFICATE-----\n", "cert": "/etc/pki/tls/certs/quadlet_demo.crt", "cert_content": "-----BEGIN CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQBMtg6EQiTVmq/afDvVqx5DANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjMDRj\nYjYwZTgtNDQyMjRkNTktYWFmZGE3YzMtYmQ1YWIxZTMwHhcNMjUwMTE4MTYzMDM4\nWhcNMjYwMTE4MTYzMDM3WjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC5qz1zo7b3RxDZwLQzw3Cr8pbChBoPxveN\nok/VSPWceFAvOcPQbIhlFXMt5owZPAoJHgXvdAJvXVnkLTgsqcSsh8nqdrs3vam/\nIgSQQmYRQxpa77/wi82ey/yXyyioyMPr6S+LtvLYak0wsckJxTUijnH7jPd/5+Jm\nFvlpb9RSGsd8IKmyNMMHM3Zr++v9GtLZ2aqNCkt7wTdOv2kBRazPcNUdr3nWY70R\nf34Nc6M/Y748966aZUmuiH1rEj122E2gBNQiVRzu+9ouaQP9O4gnB/+BJM+BjMaS\ndBhpLv8Vx65Ne+elYTxVjpmWBWsYopeh8+Q/vy79E+QZODx/P8tfAgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQUGeOQ\nMScrYtjEgua6zLBqarlahVYwHwYDVR0jBBgwFoAUAToPeclYTC3nw3PsIcujE2vF\nLBswDQYJKoZIhvcNAQELBQADggEBABpqteabzStkUaU+fGXCJCynlr7zWCEwMrJp\nUGTyteaEziVl079eRu+TZpTzU3StfRtVt3nb68CHpoDt0iQZMid702gsPn4RJNX/\nk22wyX9EFDcg4aj/Ia0BKaeQAWndywfCY++qXjXGMRfITdTzpvFt5C54QbGmDLbu\nTj26mnYc4Hc2c7R/nWJvKYLEdP6HhJE2LS3rtdRSLa2bwfgRNOskMuyTTsC+ytw0\nk62ZBAkhKLGsnS5xWOvvveClvzH/HkAgaee8SEiQzT51VnTkOoZzchCGcnQAnRlv\nK3AVtY5sLKlAdtWJi4+sNZ8KRbgImo8hVD4O+1km2OY4XfMARAg=\n-----END CERTIFICATE-----\n", "key": "/etc/pki/tls/private/quadlet_demo.key", "key_content": "-----BEGIN PRIVATE 
KEY-----\nMIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQC5qz1zo7b3RxDZ\nwLQzw3Cr8pbChBoPxveNok/VSPWceFAvOcPQbIhlFXMt5owZPAoJHgXvdAJvXVnk\nLTgsqcSsh8nqdrs3vam/IgSQQmYRQxpa77/wi82ey/yXyyioyMPr6S+LtvLYak0w\nsckJxTUijnH7jPd/5+JmFvlpb9RSGsd8IKmyNMMHM3Zr++v9GtLZ2aqNCkt7wTdO\nv2kBRazPcNUdr3nWY70Rf34Nc6M/Y748966aZUmuiH1rEj122E2gBNQiVRzu+9ou\naQP9O4gnB/+BJM+BjMaSdBhpLv8Vx65Ne+elYTxVjpmWBWsYopeh8+Q/vy79E+QZ\nODx/P8tfAgMBAAECggEAQz8WwEHs/E3EeCG4AsGIFvcDjIsdB1bYhXdis8y7jIL1\nA83Hsr8EfZwRve+xEViyB9BUdQBO+3fTnXWVUXPhlF2QD7gWId+zUdMn1Pn/jsP/\nkS0hlyWDgT6/i1jdLpicgV7RRiiqdyC1OWxC9KN1FcGC3gmWAMEDOlea2QnUXDHR\ndMuemAZcaOKjih6lU3tPtNJzllL7ifzyZC44dyDyUnnCTTnGL8B9hpjBO2OCtfkb\nd6CvrBXSg+uD9LHyJsa39K7eyAEu8zD4+YYJVz3VJiS2NiKqdqdVhNpP5VtG444Q\nPaRrPCNvR79nHGs02oV5ugDG6nYP86YCGlvREZ6zoQKBgQDqV6+s4BOMF86qXhsq\ngZx1/JJWBq+XIxnDjwDhTakJ/PBRM19BEio6cuSzjBkJr/jEfPak7rVarurxbUEq\npMt+b/ngcCBxVpVJOaUOswTGGuEwxSDhqZ8BnHF/24dLoUgkbu84a/nHbBEf626a\nzYK60WxYTCXq5knQ679oPCH2YQKBgQDK0/veQ+ADnJc9XWZUXk7FhXWdhUkswFCm\nVcTVF3CCaJd35T5aEK2bzUCikQyJUAv1rndCtGFMao31Vny9wvfwDQUmYygqXU39\nmuNNEnFAhjq5SNkkw1Y8t2pVu4nQGiAJ9MJ7VMp/NxpcpqpSJ3RksMtadM3doOXp\n8/5olEmZvwKBgAR+pA7zyUwxlLQt0+WP0Nlue7wMqseCU9ctYf3wSGK5qD5Wdug/\nmfG/DGXpNP27k1DpeK2XKoFPw4/X4Z0YxhJpwWgtnmGuIHTkFydeBaRrFZCi50uM\nvlEAmCi0pDauJD0Wp530bDkH+JgdIuTzNiOxKpvS8hM8UYQhUW2FYb1hAoGAULOf\n14PyMf2wP35WJOORPA4qCg7qWp/6dvRRaOf2oCy1d8MC2BsiuIcmXIiSEGCFIa/V\nS/kxbZJIAYH0CBz3QdAsaGWzqYeObIWgQx4/aYj+/JH7tCD0wxvHd20zHFYIwW1d\npGzMVWm7V3oI8r84FIgzoT4wWyqV0EyAwWOfFUkCgYA4s/9a7kqmM8knmucQ79gl\nASGPnhlTPTkTQH5Hr3k/bJ2fZw1qDnuo/oTdJixXyNyDWXHXiQ4UXzr3msmr9xvY\n3qm5cE1Kj0wzR7QPMVMMiafs73hEk3Cdje/ZpTBWOhNrWIJjMSEf0zqeZT91E15S\ni5ILt6TQOT3A7IyaWE9dUw==\n-----END PRIVATE KEY-----\n" } } }, "changed": false } TASK [fedora.linux_system_roles.certificate : Stop tracking certificates] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:176 Saturday 18 January 2025 11:30:39 -0500 (0:00:00.038) 0:00:11.533 ****** ok: [managed-node2] => (item={'cert': '/etc/pki/tls/certs/quadlet_demo.crt', 'cert_content': '-----BEGIN CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQBMtg6EQiTVmq/afDvVqx5DANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjMDRj\nYjYwZTgtNDQyMjRkNTktYWFmZGE3YzMtYmQ1YWIxZTMwHhcNMjUwMTE4MTYzMDM4\nWhcNMjYwMTE4MTYzMDM3WjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC5qz1zo7b3RxDZwLQzw3Cr8pbChBoPxveN\nok/VSPWceFAvOcPQbIhlFXMt5owZPAoJHgXvdAJvXVnkLTgsqcSsh8nqdrs3vam/\nIgSQQmYRQxpa77/wi82ey/yXyyioyMPr6S+LtvLYak0wsckJxTUijnH7jPd/5+Jm\nFvlpb9RSGsd8IKmyNMMHM3Zr++v9GtLZ2aqNCkt7wTdOv2kBRazPcNUdr3nWY70R\nf34Nc6M/Y748966aZUmuiH1rEj122E2gBNQiVRzu+9ouaQP9O4gnB/+BJM+BjMaS\ndBhpLv8Vx65Ne+elYTxVjpmWBWsYopeh8+Q/vy79E+QZODx/P8tfAgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQUGeOQ\nMScrYtjEgua6zLBqarlahVYwHwYDVR0jBBgwFoAUAToPeclYTC3nw3PsIcujE2vF\nLBswDQYJKoZIhvcNAQELBQADggEBABpqteabzStkUaU+fGXCJCynlr7zWCEwMrJp\nUGTyteaEziVl079eRu+TZpTzU3StfRtVt3nb68CHpoDt0iQZMid702gsPn4RJNX/\nk22wyX9EFDcg4aj/Ia0BKaeQAWndywfCY++qXjXGMRfITdTzpvFt5C54QbGmDLbu\nTj26mnYc4Hc2c7R/nWJvKYLEdP6HhJE2LS3rtdRSLa2bwfgRNOskMuyTTsC+ytw0\nk62ZBAkhKLGsnS5xWOvvveClvzH/HkAgaee8SEiQzT51VnTkOoZzchCGcnQAnRlv\nK3AVtY5sLKlAdtWJi4+sNZ8KRbgImo8hVD4O+1km2OY4XfMARAg=\n-----END CERTIFICATE-----\n', 'key': '/etc/pki/tls/private/quadlet_demo.key', 'key_content': '-----BEGIN PRIVATE 
KEY-----\nMIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQC5qz1zo7b3RxDZ\nwLQzw3Cr8pbChBoPxveNok/VSPWceFAvOcPQbIhlFXMt5owZPAoJHgXvdAJvXVnk\nLTgsqcSsh8nqdrs3vam/IgSQQmYRQxpa77/wi82ey/yXyyioyMPr6S+LtvLYak0w\nsckJxTUijnH7jPd/5+JmFvlpb9RSGsd8IKmyNMMHM3Zr++v9GtLZ2aqNCkt7wTdO\nv2kBRazPcNUdr3nWY70Rf34Nc6M/Y748966aZUmuiH1rEj122E2gBNQiVRzu+9ou\naQP9O4gnB/+BJM+BjMaSdBhpLv8Vx65Ne+elYTxVjpmWBWsYopeh8+Q/vy79E+QZ\nODx/P8tfAgMBAAECggEAQz8WwEHs/E3EeCG4AsGIFvcDjIsdB1bYhXdis8y7jIL1\nA83Hsr8EfZwRve+xEViyB9BUdQBO+3fTnXWVUXPhlF2QD7gWId+zUdMn1Pn/jsP/\nkS0hlyWDgT6/i1jdLpicgV7RRiiqdyC1OWxC9KN1FcGC3gmWAMEDOlea2QnUXDHR\ndMuemAZcaOKjih6lU3tPtNJzllL7ifzyZC44dyDyUnnCTTnGL8B9hpjBO2OCtfkb\nd6CvrBXSg+uD9LHyJsa39K7eyAEu8zD4+YYJVz3VJiS2NiKqdqdVhNpP5VtG444Q\nPaRrPCNvR79nHGs02oV5ugDG6nYP86YCGlvREZ6zoQKBgQDqV6+s4BOMF86qXhsq\ngZx1/JJWBq+XIxnDjwDhTakJ/PBRM19BEio6cuSzjBkJr/jEfPak7rVarurxbUEq\npMt+b/ngcCBxVpVJOaUOswTGGuEwxSDhqZ8BnHF/24dLoUgkbu84a/nHbBEf626a\nzYK60WxYTCXq5knQ679oPCH2YQKBgQDK0/veQ+ADnJc9XWZUXk7FhXWdhUkswFCm\nVcTVF3CCaJd35T5aEK2bzUCikQyJUAv1rndCtGFMao31Vny9wvfwDQUmYygqXU39\nmuNNEnFAhjq5SNkkw1Y8t2pVu4nQGiAJ9MJ7VMp/NxpcpqpSJ3RksMtadM3doOXp\n8/5olEmZvwKBgAR+pA7zyUwxlLQt0+WP0Nlue7wMqseCU9ctYf3wSGK5qD5Wdug/\nmfG/DGXpNP27k1DpeK2XKoFPw4/X4Z0YxhJpwWgtnmGuIHTkFydeBaRrFZCi50uM\nvlEAmCi0pDauJD0Wp530bDkH+JgdIuTzNiOxKpvS8hM8UYQhUW2FYb1hAoGAULOf\n14PyMf2wP35WJOORPA4qCg7qWp/6dvRRaOf2oCy1d8MC2BsiuIcmXIiSEGCFIa/V\nS/kxbZJIAYH0CBz3QdAsaGWzqYeObIWgQx4/aYj+/JH7tCD0wxvHd20zHFYIwW1d\npGzMVWm7V3oI8r84FIgzoT4wWyqV0EyAwWOfFUkCgYA4s/9a7kqmM8knmucQ79gl\nASGPnhlTPTkTQH5Hr3k/bJ2fZw1qDnuo/oTdJixXyNyDWXHXiQ4UXzr3msmr9xvY\n3qm5cE1Kj0wzR7QPMVMMiafs73hEk3Cdje/ZpTBWOhNrWIJjMSEf0zqeZT91E15S\ni5ILt6TQOT3A7IyaWE9dUw==\n-----END PRIVATE KEY-----\n', 'ca': '/etc/pki/tls/certs/quadlet_demo.crt', 'ca_content': '-----BEGIN CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQBMtg6EQiTVmq/afDvVqx5DANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjMDRj\nYjYwZTgtNDQyMjRkNTktYWFmZGE3YzMtYmQ1YWIxZTMwHhcNMjUwMTE4MTYzMDM4\nWhcNMjYwMTE4MTYzMDM3WjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC5qz1zo7b3RxDZwLQzw3Cr8pbChBoPxveN\nok/VSPWceFAvOcPQbIhlFXMt5owZPAoJHgXvdAJvXVnkLTgsqcSsh8nqdrs3vam/\nIgSQQmYRQxpa77/wi82ey/yXyyioyMPr6S+LtvLYak0wsckJxTUijnH7jPd/5+Jm\nFvlpb9RSGsd8IKmyNMMHM3Zr++v9GtLZ2aqNCkt7wTdOv2kBRazPcNUdr3nWY70R\nf34Nc6M/Y748966aZUmuiH1rEj122E2gBNQiVRzu+9ouaQP9O4gnB/+BJM+BjMaS\ndBhpLv8Vx65Ne+elYTxVjpmWBWsYopeh8+Q/vy79E+QZODx/P8tfAgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQUGeOQ\nMScrYtjEgua6zLBqarlahVYwHwYDVR0jBBgwFoAUAToPeclYTC3nw3PsIcujE2vF\nLBswDQYJKoZIhvcNAQELBQADggEBABpqteabzStkUaU+fGXCJCynlr7zWCEwMrJp\nUGTyteaEziVl079eRu+TZpTzU3StfRtVt3nb68CHpoDt0iQZMid702gsPn4RJNX/\nk22wyX9EFDcg4aj/Ia0BKaeQAWndywfCY++qXjXGMRfITdTzpvFt5C54QbGmDLbu\nTj26mnYc4Hc2c7R/nWJvKYLEdP6HhJE2LS3rtdRSLa2bwfgRNOskMuyTTsC+ytw0\nk62ZBAkhKLGsnS5xWOvvveClvzH/HkAgaee8SEiQzT51VnTkOoZzchCGcnQAnRlv\nK3AVtY5sLKlAdtWJi4+sNZ8KRbgImo8hVD4O+1km2OY4XfMARAg=\n-----END CERTIFICATE-----\n'}) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "getcert", "stop-tracking", "-f", "/etc/pki/tls/certs/quadlet_demo.crt" ], "delta": "0:00:00.026974", "end": "2025-01-18 11:30:39.844601", "item": { "ca": "/etc/pki/tls/certs/quadlet_demo.crt", "ca_content": "-----BEGIN 
CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQBMtg6EQiTVmq/afDvVqx5DANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjMDRj\nYjYwZTgtNDQyMjRkNTktYWFmZGE3YzMtYmQ1YWIxZTMwHhcNMjUwMTE4MTYzMDM4\nWhcNMjYwMTE4MTYzMDM3WjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC5qz1zo7b3RxDZwLQzw3Cr8pbChBoPxveN\nok/VSPWceFAvOcPQbIhlFXMt5owZPAoJHgXvdAJvXVnkLTgsqcSsh8nqdrs3vam/\nIgSQQmYRQxpa77/wi82ey/yXyyioyMPr6S+LtvLYak0wsckJxTUijnH7jPd/5+Jm\nFvlpb9RSGsd8IKmyNMMHM3Zr++v9GtLZ2aqNCkt7wTdOv2kBRazPcNUdr3nWY70R\nf34Nc6M/Y748966aZUmuiH1rEj122E2gBNQiVRzu+9ouaQP9O4gnB/+BJM+BjMaS\ndBhpLv8Vx65Ne+elYTxVjpmWBWsYopeh8+Q/vy79E+QZODx/P8tfAgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQUGeOQ\nMScrYtjEgua6zLBqarlahVYwHwYDVR0jBBgwFoAUAToPeclYTC3nw3PsIcujE2vF\nLBswDQYJKoZIhvcNAQELBQADggEBABpqteabzStkUaU+fGXCJCynlr7zWCEwMrJp\nUGTyteaEziVl079eRu+TZpTzU3StfRtVt3nb68CHpoDt0iQZMid702gsPn4RJNX/\nk22wyX9EFDcg4aj/Ia0BKaeQAWndywfCY++qXjXGMRfITdTzpvFt5C54QbGmDLbu\nTj26mnYc4Hc2c7R/nWJvKYLEdP6HhJE2LS3rtdRSLa2bwfgRNOskMuyTTsC+ytw0\nk62ZBAkhKLGsnS5xWOvvveClvzH/HkAgaee8SEiQzT51VnTkOoZzchCGcnQAnRlv\nK3AVtY5sLKlAdtWJi4+sNZ8KRbgImo8hVD4O+1km2OY4XfMARAg=\n-----END CERTIFICATE-----\n", "cert": "/etc/pki/tls/certs/quadlet_demo.crt", "cert_content": "-----BEGIN CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQBMtg6EQiTVmq/afDvVqx5DANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjMDRj\nYjYwZTgtNDQyMjRkNTktYWFmZGE3YzMtYmQ1YWIxZTMwHhcNMjUwMTE4MTYzMDM4\nWhcNMjYwMTE4MTYzMDM3WjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC5qz1zo7b3RxDZwLQzw3Cr8pbChBoPxveN\nok/VSPWceFAvOcPQbIhlFXMt5owZPAoJHgXvdAJvXVnkLTgsqcSsh8nqdrs3vam/\nIgSQQmYRQxpa77/wi82ey/yXyyioyMPr6S+LtvLYak0wsckJxTUijnH7jPd/5+Jm\nFvlpb9RSGsd8IKmyNMMHM3Zr++v9GtLZ2aqNCkt7wTdOv2kBRazPcNUdr3nWY70R\nf34Nc6M/Y748966aZUmuiH1rEj122E2gBNQiVRzu+9ouaQP9O4gnB/+BJM+BjMaS\ndBhpLv8Vx65Ne+elYTxVjpmWBWsYopeh8+Q/vy79E+QZODx/P8tfAgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQUGeOQ\nMScrYtjEgua6zLBqarlahVYwHwYDVR0jBBgwFoAUAToPeclYTC3nw3PsIcujE2vF\nLBswDQYJKoZIhvcNAQELBQADggEBABpqteabzStkUaU+fGXCJCynlr7zWCEwMrJp\nUGTyteaEziVl079eRu+TZpTzU3StfRtVt3nb68CHpoDt0iQZMid702gsPn4RJNX/\nk22wyX9EFDcg4aj/Ia0BKaeQAWndywfCY++qXjXGMRfITdTzpvFt5C54QbGmDLbu\nTj26mnYc4Hc2c7R/nWJvKYLEdP6HhJE2LS3rtdRSLa2bwfgRNOskMuyTTsC+ytw0\nk62ZBAkhKLGsnS5xWOvvveClvzH/HkAgaee8SEiQzT51VnTkOoZzchCGcnQAnRlv\nK3AVtY5sLKlAdtWJi4+sNZ8KRbgImo8hVD4O+1km2OY4XfMARAg=\n-----END CERTIFICATE-----\n", "key": "/etc/pki/tls/private/quadlet_demo.key", "key_content": "-----BEGIN PRIVATE 
KEY-----\nMIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQC5qz1zo7b3RxDZ\nwLQzw3Cr8pbChBoPxveNok/VSPWceFAvOcPQbIhlFXMt5owZPAoJHgXvdAJvXVnk\nLTgsqcSsh8nqdrs3vam/IgSQQmYRQxpa77/wi82ey/yXyyioyMPr6S+LtvLYak0w\nsckJxTUijnH7jPd/5+JmFvlpb9RSGsd8IKmyNMMHM3Zr++v9GtLZ2aqNCkt7wTdO\nv2kBRazPcNUdr3nWY70Rf34Nc6M/Y748966aZUmuiH1rEj122E2gBNQiVRzu+9ou\naQP9O4gnB/+BJM+BjMaSdBhpLv8Vx65Ne+elYTxVjpmWBWsYopeh8+Q/vy79E+QZ\nODx/P8tfAgMBAAECggEAQz8WwEHs/E3EeCG4AsGIFvcDjIsdB1bYhXdis8y7jIL1\nA83Hsr8EfZwRve+xEViyB9BUdQBO+3fTnXWVUXPhlF2QD7gWId+zUdMn1Pn/jsP/\nkS0hlyWDgT6/i1jdLpicgV7RRiiqdyC1OWxC9KN1FcGC3gmWAMEDOlea2QnUXDHR\ndMuemAZcaOKjih6lU3tPtNJzllL7ifzyZC44dyDyUnnCTTnGL8B9hpjBO2OCtfkb\nd6CvrBXSg+uD9LHyJsa39K7eyAEu8zD4+YYJVz3VJiS2NiKqdqdVhNpP5VtG444Q\nPaRrPCNvR79nHGs02oV5ugDG6nYP86YCGlvREZ6zoQKBgQDqV6+s4BOMF86qXhsq\ngZx1/JJWBq+XIxnDjwDhTakJ/PBRM19BEio6cuSzjBkJr/jEfPak7rVarurxbUEq\npMt+b/ngcCBxVpVJOaUOswTGGuEwxSDhqZ8BnHF/24dLoUgkbu84a/nHbBEf626a\nzYK60WxYTCXq5knQ679oPCH2YQKBgQDK0/veQ+ADnJc9XWZUXk7FhXWdhUkswFCm\nVcTVF3CCaJd35T5aEK2bzUCikQyJUAv1rndCtGFMao31Vny9wvfwDQUmYygqXU39\nmuNNEnFAhjq5SNkkw1Y8t2pVu4nQGiAJ9MJ7VMp/NxpcpqpSJ3RksMtadM3doOXp\n8/5olEmZvwKBgAR+pA7zyUwxlLQt0+WP0Nlue7wMqseCU9ctYf3wSGK5qD5Wdug/\nmfG/DGXpNP27k1DpeK2XKoFPw4/X4Z0YxhJpwWgtnmGuIHTkFydeBaRrFZCi50uM\nvlEAmCi0pDauJD0Wp530bDkH+JgdIuTzNiOxKpvS8hM8UYQhUW2FYb1hAoGAULOf\n14PyMf2wP35WJOORPA4qCg7qWp/6dvRRaOf2oCy1d8MC2BsiuIcmXIiSEGCFIa/V\nS/kxbZJIAYH0CBz3QdAsaGWzqYeObIWgQx4/aYj+/JH7tCD0wxvHd20zHFYIwW1d\npGzMVWm7V3oI8r84FIgzoT4wWyqV0EyAwWOfFUkCgYA4s/9a7kqmM8knmucQ79gl\nASGPnhlTPTkTQH5Hr3k/bJ2fZw1qDnuo/oTdJixXyNyDWXHXiQ4UXzr3msmr9xvY\n3qm5cE1Kj0wzR7QPMVMMiafs73hEk3Cdje/ZpTBWOhNrWIJjMSEf0zqeZT91E15S\ni5ILt6TQOT3A7IyaWE9dUw==\n-----END PRIVATE KEY-----\n" }, "rc": 0, "start": "2025-01-18 11:30:39.817627" } STDOUT: Request "20250118163037" removed. 
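[NOTE]: The ok result above comes from the role shelling out to certmonger's
getcert once per issued certificate. A sketch of the same cleanup step,
assuming a loop over the certificate_test_certs fact built earlier (the loop
shape and register names are illustrative, not the role's exact task):

    - name: Stop tracking certificates
      ansible.builtin.command: getcert stop-tracking -f {{ item.cert }}
      loop: "{{ certificate_test_certs.values() | list }}"
      changed_when: false  # cleanup query; never report a change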
TASK [fedora.linux_system_roles.certificate : Remove files] ********************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:181
Saturday 18 January 2025 11:30:39 -0500 (0:00:00.497) 0:00:12.031 ******
changed: [managed-node2] => (item=/etc/pki/tls/certs/quadlet_demo.crt) => { "ansible_loop_var": "item", "changed": true, "item": "/etc/pki/tls/certs/quadlet_demo.crt", "path": "/etc/pki/tls/certs/quadlet_demo.crt", "state": "absent" }
changed: [managed-node2] => (item=/etc/pki/tls/private/quadlet_demo.key) => { "ansible_loop_var": "item", "changed": true, "item": "/etc/pki/tls/private/quadlet_demo.key", "path": "/etc/pki/tls/private/quadlet_demo.key", "state": "absent" }
ok: [managed-node2] => (item=/etc/pki/tls/certs/quadlet_demo.crt) => { "ansible_loop_var": "item", "changed": false, "item": "/etc/pki/tls/certs/quadlet_demo.crt", "path": "/etc/pki/tls/certs/quadlet_demo.crt", "state": "absent" }

TASK [Run the role] ************************************************************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:62
Saturday 18 January 2025 11:30:41 -0500 (0:00:01.107) 0:00:13.138 ******
included: fedora.linux_system_roles.podman for managed-node2

TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3
Saturday 18 January 2025 11:30:41 -0500 (0:00:00.061) 0:00:13.200 ******
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node2

TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] ****
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3
Saturday 18 January 2025 11:30:41 -0500 (0:00:00.023) 0:00:13.224 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11
Saturday 18 January 2025 11:30:41 -0500 (0:00:00.036) 0:00:13.260 ******
ok: [managed-node2] => { "changed": false, "stat": { "exists": false } }

TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16
Saturday 18 January 2025 11:30:41 -0500 (0:00:00.366) 0:00:13.626 ******
ok: [managed-node2] => { "ansible_facts": { "__podman_is_ostree": false }, "changed": false }

TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23
Saturday 18 January 2025 11:30:41 -0500 (0:00:00.023) 0:00:13.650 ******
ok: [managed-node2] => { "changed": false, "stat": { "exists": false } }

TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28
Saturday 18 January 2025 11:30:41 -0500 (0:00:00.370) 0:00:14.020 ******
ok: [managed-node2] => { "ansible_facts": { "__podman_is_transactional": false }, "changed": false }

TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32
Saturday 18 January 2025 11:30:41 -0500 (0:00:00.022) 0:00:14.043 ******
ok: [managed-node2] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" }
skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" }
ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" }
ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" }

TASK [fedora.linux_system_roles.podman : Gather the package facts] *************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
Saturday 18 January 2025 11:30:41 -0500 (0:00:00.042) 0:00:14.085 ******
ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : Enable copr if requested] *************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10
Saturday 18 January 2025 11:30:42 -0500 (0:00:01.041) 0:00:15.126 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14
Saturday 18 January 2025 11:30:43 -0500 (0:00:00.044) 0:00:15.171 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28
Saturday 18 January 2025 11:30:43 -0500 (0:00:00.048) 0:00:15.219 ******
skipping: [managed-node2] => { "false_condition": "__podman_is_transactional | d(false)" }

TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33
Saturday 18 January 2025 11:30:43 -0500 (0:00:00.046) 0:00:15.264 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38
Saturday 18 January 2025 11:30:43 -0500 (0:00:00.046) 0:00:15.310 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Get podman version] *******************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46
Saturday 18 January 2025 11:30:43 -0500 (0:00:00.043) 0:00:15.354 ******
ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.024770", "end": "2025-01-18 11:30:43.560913", "rc": 0, "start": "2025-01-18 11:30:43.536143" }
STDOUT: podman version 5.3.1

TASK [fedora.linux_system_roles.podman : Set podman version] *******************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52
Saturday 18 January 2025 11:30:43 -0500 (0:00:00.404) 0:00:15.759 ******
ok: [managed-node2] => { "ansible_facts": { "podman_version": "5.3.1" }, "changed": false }

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56
Saturday 18 January 2025 11:30:43 -0500 (0:00:00.029) 0:00:15.788 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63
Saturday 18 January 2025 11:30:43 -0500 (0:00:00.030) 0:00:15.818 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73
Saturday 18 January 2025 11:30:43 -0500 (0:00:00.094) 0:00:15.913 ******
META: end_host conditional evaluated to False, continuing execution for managed-node2
skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" }
MSG: end_host conditional evaluated to false, continuing execution for managed-node2

TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80
Saturday 18 January 2025 11:30:43 -0500 (0:00:00.115) 0:00:16.028 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96
Saturday 18 January 2025 11:30:43 -0500 (0:00:00.064) 0:00:16.093 ******
META: end_host conditional evaluated to False, continuing execution for managed-node2
skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" }
MSG: end_host conditional evaluated to false, continuing execution for managed-node2
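[NOTE]: The version gates above parse 'podman --version' into a fact and
compare it with Jinja2's version() test; a condensed sketch of the pattern
(register name and failure message are illustrative):

    - name: Get podman version
      ansible.builtin.command: podman --version
      register: __podman_version_output
      changed_when: false  # stdout here is "podman version 5.3.1"

    - name: Set podman version
      ansible.builtin.set_fact:
        podman_version: "{{ __podman_version_output.stdout.split()[-1] }}"

    - name: Podman package version must be 4.4 or later for quadlet, secrets
      ansible.builtin.fail:
        msg: quadlet and secrets support requires podman 4.4 or later
      when: podman_version is version("4.4", "<")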
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109
Saturday 18 January 2025 11:30:44 -0500 (0:00:00.044) 0:00:16.137 ******
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 18 January 2025 11:30:44 -0500 (0:00:00.056) 0:00:16.193 ******
ok: [managed-node2] => { "ansible_facts": { "getent_passwd": { "root": [ "x", "0", "0", "Super User", "/root", "/bin/bash" ] } }, "changed": false }

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 18 January 2025 11:30:44 -0500 (0:00:00.464) 0:00:16.658 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 18 January 2025 11:30:44 -0500 (0:00:00.031) 0:00:16.690 ******
ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false }

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Saturday 18 January 2025 11:30:44 -0500 (0:00:00.039) 0:00:16.729 ******
ok: [managed-node2] => { "changed": false, "stat": { "atime": 1737217697.4254181, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217689.5844104, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } }

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Saturday 18 January 2025 11:30:44 -0500 (0:00:00.397) 0:00:17.126 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Saturday 18 January 2025 11:30:45 -0500 (0:00:00.033) 0:00:17.160 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Saturday 18 January 2025 11:30:45 -0500 (0:00:00.029) 0:00:17.189 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Saturday 18 January 2025 11:30:45 -0500 (0:00:00.030) 0:00:17.220 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Saturday 18 January 2025 11:30:45 -0500 (0:00:00.028) 0:00:17.248 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Saturday 18 January 2025 11:30:45 -0500 (0:00:00.029) 0:00:17.277 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Saturday 18 January 2025 11:30:45 -0500 (0:00:00.028) 0:00:17.306 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Saturday 18 January 2025 11:30:45 -0500 (0:00:00.029) 0:00:17.336 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set config file paths] ****************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115
Saturday 18 January 2025 11:30:45 -0500 (0:00:00.029) 0:00:17.365 ******
ok: [managed-node2] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false }

TASK [fedora.linux_system_roles.podman : Handle container.conf.d] **************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124
Saturday 18 January 2025 11:30:45 -0500 (0:00:00.054) 0:00:17.419 ******
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node2

TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] ***********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5
Saturday 18 January 2025 11:30:45 -0500 (0:00:00.053) 0:00:17.472 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Update container config file] *********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13
Saturday 18 January 2025 11:30:45 -0500 (0:00:00.029) 0:00:17.502 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] *************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127
Saturday 18 January 2025 11:30:45 -0500 (0:00:00.060) 0:00:17.562 ******
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node2

TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] ***********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5
Saturday 18 January 2025 11:30:45 -0500 (0:00:00.054) 0:00:17.617 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Update registries config file] ********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13
Saturday 18 January 2025 11:30:45 -0500 (0:00:00.029) 0:00:17.646 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Handle storage.conf] ******************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130
Saturday 18 January 2025 11:30:45 -0500 (0:00:00.029) 0:00:17.675 ******
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node2

TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5
Saturday 18 January 2025 11:30:45 -0500 (0:00:00.054) 0:00:17.730 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Update storage config file] ***********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13
Saturday 18 January 2025 11:30:45 -0500 (0:00:00.030) 0:00:17.760 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Handle policy.json] *******************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133
Saturday 18 January 2025 11:30:45 -0500 (0:00:00.028) 0:00:17.788 ******
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node2

TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6
Saturday 18 January 2025 11:30:45 -0500 (0:00:00.075) 0:00:17.864 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14
Saturday 18 January 2025 11:30:45 -0500 (0:00:00.048) 0:00:17.912 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Get the existing policy.json] *********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19
Saturday 18 January 2025 11:30:45 -0500 (0:00:00.033) 0:00:17.946 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Write new policy.json file] ***********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25
Saturday 18 January 2025 11:30:45 -0500 (0:00:00.033) 0:00:17.980 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" }
[managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Saturday 18 January 2025 11:30:45 -0500 (0:00:00.030) 0:00:17.760 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133 Saturday 18 January 2025 11:30:45 -0500 (0:00:00.028) 0:00:17.788 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Saturday 18 January 2025 11:30:45 -0500 (0:00:00.075) 0:00:17.864 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Saturday 18 January 2025 11:30:45 -0500 (0:00:00.048) 0:00:17.912 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Saturday 18 January 2025 11:30:45 -0500 (0:00:00.033) 0:00:17.946 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Saturday 18 January 2025 11:30:45 -0500 (0:00:00.033) 0:00:17.980 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139 Saturday 18 January 2025 11:30:45 -0500 (0:00:00.031) 0:00:18.012 ****** included: fedora.linux_system_roles.firewall for managed-node2 TASK [fedora.linux_system_roles.firewall : Setup firewalld] ******************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2 Saturday 18 January 2025 11:30:45 -0500 (0:00:00.103) 0:00:18.116 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node2 TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] *** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2 Saturday 18 January 2025 11:30:46 -0500 (0:00:00.074) 0:00:18.191 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if system is ostree] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10 Saturday 18 January 2025 11:30:46 -0500 (0:00:00.033) 0:00:18.225 ****** ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15 Saturday 18 January 2025 11:30:46 -0500 (0:00:00.413) 0:00:18.638 ****** ok: [managed-node2] => { "ansible_facts": { "__firewall_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22 Saturday 18 January 2025 11:30:46 -0500 (0:00:00.033) 0:00:18.672 ****** ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:27 Saturday 18 January 2025 11:30:46 -0500 (0:00:00.382) 0:00:19.055 ****** ok: [managed-node2] => { "ansible_facts": { "__firewall_is_transactional": false }, "changed": false } TASK [fedora.linux_system_roles.firewall : Install firewalld] ****************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31 Saturday 18 January 2025 11:30:46 -0500 (0:00:00.036) 0:00:19.091 ****** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: firewalld TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:43 Saturday 18 January 2025 11:30:47 -0500 (0:00:00.848) 0:00:19.940 ****** skipping: [managed-node2] => { "false_condition": "__firewall_is_transactional | d(false)" } TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:48 Saturday 18 January 2025 11:30:47 -0500 (0:00:00.035) 0:00:19.976 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:53 Saturday 18 January 2025 11:30:47 -0500 (0:00:00.035) 0:00:20.011 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Collect service facts] ************** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5 Saturday 18 January 2025 11:30:47 -0500 (0:00:00.032) 0:00:20.044 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:9 Saturday 18 January 2025 11:30:47 -0500 (0:00:00.030) 0:00:20.074 ****** skipping: [managed-node2] => (item=nftables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "nftables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=iptables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "iptables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=ufw) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "ufw", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:22 Saturday 18 January 2025 11:30:47 -0500 (0:00:00.040) 0:00:20.115 ****** ok: [managed-node2] => { "changed": false, "name": "firewalld", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2025-01-18 11:28:23 EST", "ActiveEnterTimestampMonotonic": "296477974", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "basic.target dbus-broker.service dbus.socket polkit.service sysinit.target system.slice", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-01-18 11:28:22 EST", "AssertTimestampMonotonic": "295763956", "Before": "shutdown.target multi-user.target network-pre.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "484727000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "yes", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-01-18 11:28:22 EST", "ConditionTimestampMonotonic": "295763953", 
"ConfigurationDirectoryMode": "0755", "Conflicts": "ip6tables.service ipset.service shutdown.target iptables.service ebtables.service", "ControlGroup": "/system.slice/firewalld.service", "ControlGroupId": "4516", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DeviceAllow": "char-rtc r", "DevicePolicy": "closed", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EffectiveMemoryHigh": "3698241536", "EffectiveMemoryMax": "3698241536", "EffectiveTasksMax": "22365", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestamp": "Sat 2025-01-18 11:28:22 EST", "ExecMainHandoffTimestampMonotonic": "295794898", "ExecMainPID": "10780", "ExecMainStartTimestamp": "Sat 2025-01-18 11:28:22 EST", "ExecMainStartTimestampMonotonic": "295767198", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2025-01-18 11:28:22 EST", "InactiveExitTimestampMonotonic": "295767914", "InvocationID": "570bf0d320324c45b6e7e82fd441da39", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", 
"LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "10780", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3220193280", "MemoryCurrent": "34250752", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "34856960", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "yes", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectControlGroupsEx": "yes", "ProtectHome": "yes", "ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "yes", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "dbus.socket system.slice sysinit.target", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", 
"StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2025-01-18 11:30:37 EST", "StateChangeTimestampMonotonic": "430294911", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28 Saturday 18 January 2025 11:30:48 -0500 (0:00:00.549) 0:00:20.665 ****** ok: [managed-node2] => { "changed": false, "enabled": true, "name": "firewalld", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2025-01-18 11:28:23 EST", "ActiveEnterTimestampMonotonic": "296477974", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "basic.target dbus-broker.service dbus.socket polkit.service sysinit.target system.slice", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-01-18 11:28:22 EST", "AssertTimestampMonotonic": "295763956", "Before": "shutdown.target multi-user.target network-pre.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "484727000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "yes", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf 
cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-01-18 11:28:22 EST", "ConditionTimestampMonotonic": "295763953", "ConfigurationDirectoryMode": "0755", "Conflicts": "ip6tables.service ipset.service shutdown.target iptables.service ebtables.service", "ControlGroup": "/system.slice/firewalld.service", "ControlGroupId": "4516", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DeviceAllow": "char-rtc r", "DevicePolicy": "closed", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EffectiveMemoryHigh": "3698241536", "EffectiveMemoryMax": "3698241536", "EffectiveTasksMax": "22365", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestamp": "Sat 2025-01-18 11:28:22 EST", "ExecMainHandoffTimestampMonotonic": "295794898", "ExecMainPID": "10780", "ExecMainStartTimestamp": "Sat 2025-01-18 11:28:22 EST", "ExecMainStartTimestampMonotonic": "295767198", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2025-01-18 11:28:22 EST", "InactiveExitTimestampMonotonic": "295767914", "InvocationID": "570bf0d320324c45b6e7e82fd441da39", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", 
"LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "10780", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3222044672", "MemoryCurrent": "34250752", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "34856960", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "yes", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectControlGroupsEx": "yes", "ProtectHome": "yes", "ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "yes", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "dbus.socket system.slice sysinit.target", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", 
"RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2025-01-18 11:30:37 EST", "StateChangeTimestampMonotonic": "430294911", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:34 Saturday 18 January 2025 11:30:49 -0500 (0:00:00.545) 0:00:21.211 ****** ok: [managed-node2] => { "ansible_facts": { "__firewall_previous_replaced": false, "__firewall_python_cmd": "/usr/bin/python3.12", "__firewall_report_changed": true }, "changed": false } TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:43 Saturday 18 January 2025 11:30:49 -0500 (0:00:00.041) 0:00:21.252 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:55 Saturday 18 January 2025 11:30:49 -0500 (0:00:00.030) 0:00:21.282 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Configure firewall] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71 Saturday 18 January 2025 11:30:49 -0500 (0:00:00.046) 0:00:21.329 ****** changed: [managed-node2] => (item={'port': '8000/tcp', 'state': 'enabled'}) => { "__firewall_changed": true, "ansible_loop_var": "item", "changed": true, "item": { "port": "8000/tcp", "state": "enabled" } } 
changed: [managed-node2] => (item={'port': '9000/tcp', 'state': 'enabled'}) => { "__firewall_changed": true, "ansible_loop_var": "item", "changed": true, "item": { "port": "9000/tcp", "state": "enabled" } } TASK [fedora.linux_system_roles.firewall : Gather firewall config information] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:120 Saturday 18 January 2025 11:30:50 -0500 (0:00:01.117) 0:00:22.447 ****** skipping: [managed-node2] => (item={'port': '8000/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall | length == 1", "item": { "port": "8000/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item={'port': '9000/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall | length == 1", "item": { "port": "9000/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:130 Saturday 18 January 2025 11:30:50 -0500 (0:00:00.044) 0:00:22.491 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall | length == 1", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:139 Saturday 18 January 2025 11:30:50 -0500 (0:00:00.032) 0:00:22.524 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:144 Saturday 18 January 2025 11:30:50 -0500 (0:00:00.029) 0:00:22.553 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:153 Saturday 18 January 2025 11:30:50 -0500 (0:00:00.031) 0:00:22.585 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Calculate what has changed] ********* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:163 Saturday 18 January 2025 11:30:50 -0500 (0:00:00.028) 0:00:22.613 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Show diffs] ************************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:169 Saturday 18 January 2025 11:30:50 -0500 (0:00:00.063) 0:00:22.676 ****** skipping: [managed-node2] => { "false_condition": "__firewall_previous_replaced | 
bool" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146 Saturday 18 January 2025 11:30:50 -0500 (0:00:00.043) 0:00:22.720 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153 Saturday 18 January 2025 11:30:50 -0500 (0:00:00.030) 0:00:22.750 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157 Saturday 18 January 2025 11:30:50 -0500 (0:00:00.030) 0:00:22.781 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166 Saturday 18 January 2025 11:30:50 -0500 (0:00:00.026) 0:00:22.807 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175 Saturday 18 January 2025 11:30:50 -0500 (0:00:00.027) 0:00:22.835 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Saturday 18 January 2025 11:30:50 -0500 (0:00:00.129) 0:00:22.965 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 18 January 2025 11:30:50 -0500 (0:00:00.033) 0:00:22.998 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:30:50 -0500 (0:00:00.053) 0:00:23.052 ****** skipping: [managed-node2] => { "changed": false, 
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:30:50 -0500 (0:00:00.032) 0:00:23.085 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:30:50 -0500 (0:00:00.034) 0:00:23.119 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:30:51 -0500 (0:00:00.045) 0:00:23.164 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:30:51 -0500 (0:00:00.047) 0:00:23.212 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:30:51 -0500 (0:00:00.035) 0:00:23.248 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:30:51 -0500 (0:00:00.033) 0:00:23.282 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:30:51 -0500 (0:00:00.035) 0:00:23.317 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:30:51 -0500 (0:00:00.038) 0:00:23.356 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:30:51 -0500 (0:00:00.044) 0:00:23.401 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:30:51 -0500 (0:00:00.044) 0:00:23.445 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:30:51 -0500 (0:00:00.062) 0:00:23.508 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14 Saturday 18 January 2025 11:30:51 -0500 (0:00:00.030) 0:00:23.538 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20 Saturday 18 January 2025 11:30:51 -0500 (0:00:00.040) 0:00:23.579 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:30:51 -0500 (0:00:00.075) 0:00:23.655 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:30:51 -0500 (0:00:00.049) 0:00:23.705 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:30:51 -0500 (0:00:00.046) 0:00:23.752 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25 
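A note on the censored output in this stretch of the log: handle_secret.yml was included three times above, once per secret, and each pass ends in a "Manage each secret" result that shows only "changed": true because the task runs with no_log: true. A minimal sketch of the kind of input that produces this pattern — podman_secrets as the role's secret-list variable is an assumption, and the secret name and vaulted variable below are hypothetical:

    # Sketch only: one list entry per handle_secret.yml inclusion seen above.
    podman_secrets:                              # assumed variable name
      - name: demo-secret                        # hypothetical secret name
        state: present
        data: "{{ some_vaulted_password }}"      # hypothetical vault-encrypted variable

Each entry is realized as a Podman secret on the managed node; keeping the value vault-encrypted and the task under no_log is exactly what keeps it out of a log like this one.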
Saturday 18 January 2025 11:30:51 -0500 (0:00:00.049) 0:00:23.801 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41 Saturday 18 January 2025 11:30:51 -0500 (0:00:00.048) 0:00:23.850 ****** [WARNING]: Using a variable for a task's 'args' is unsafe in some situations (see https://docs.ansible.com/ansible/devel/reference_appendices/faq.html#argsplat- unsafe) changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Saturday 18 January 2025 11:30:52 -0500 (0:00:00.692) 0:00:24.543 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 18 January 2025 11:30:52 -0500 (0:00:00.033) 0:00:24.576 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:30:52 -0500 (0:00:00.053) 0:00:24.630 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:30:52 -0500 (0:00:00.033) 0:00:24.664 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:30:52 -0500 (0:00:00.033) 0:00:24.697 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:30:52 -0500 (0:00:00.043) 0:00:24.740 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 
18 January 2025 11:30:52 -0500 (0:00:00.030) 0:00:24.771 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:30:52 -0500 (0:00:00.032) 0:00:24.803 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:30:52 -0500 (0:00:00.030) 0:00:24.834 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:30:52 -0500 (0:00:00.032) 0:00:24.866 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:30:52 -0500 (0:00:00.029) 0:00:24.895 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:30:52 -0500 (0:00:00.082) 0:00:24.978 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:30:52 -0500 (0:00:00.050) 0:00:25.028 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:30:52 -0500 (0:00:00.045) 0:00:25.074 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14 Saturday 18 January 2025 11:30:52 -0500 (0:00:00.046) 0:00:25.121 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_rootless": false, 
"__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20 Saturday 18 January 2025 11:30:53 -0500 (0:00:00.067) 0:00:25.188 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:30:53 -0500 (0:00:00.097) 0:00:25.286 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:30:53 -0500 (0:00:00.048) 0:00:25.334 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:30:53 -0500 (0:00:00.049) 0:00:25.384 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25 Saturday 18 January 2025 11:30:53 -0500 (0:00:00.047) 0:00:25.432 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41 Saturday 18 January 2025 11:30:53 -0500 (0:00:00.063) 0:00:25.495 ****** changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Saturday 18 January 2025 11:30:53 -0500 (0:00:00.571) 0:00:26.067 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 18 January 2025 11:30:53 -0500 (0:00:00.055) 0:00:26.123 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:30:54 -0500 (0:00:00.085) 0:00:26.209 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:30:54 -0500 (0:00:00.053) 0:00:26.262 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:30:54 -0500 (0:00:00.053) 0:00:26.316 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:30:54 -0500 (0:00:00.065) 0:00:26.382 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:30:54 -0500 (0:00:00.052) 0:00:26.434 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:30:54 -0500 (0:00:00.049) 0:00:26.483 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:30:54 -0500 (0:00:00.050) 0:00:26.533 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:30:54 -0500 (0:00:00.077) 0:00:26.610 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 
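
Note on the secret handling in this stretch of the log: each "Manage each secret" result comes back changed with its output censored, because the role runs the task with no_log: true and the secret data comes from vault. A minimal sketch of what such a task looks like with the containers.podman.podman_secret module (the secret name and source variable below are illustrative placeholders, not the censored values from this run):

    - name: Manage each secret
      containers.podman.podman_secret:
        name: example-secret                  # hypothetical; the real name is censored above
        data: "{{ vault_secret_value }}"      # hypothetical vault-backed variable
        state: present
      no_log: true                            # why the log only prints "censored"

For each secret the role first re-runs handle_user_group.yml for the target user, which is the block of tasks around this note; in this pass every subuid/subgid task is skipped because __podman_check_subids evaluated false.
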
Saturday 18 January 2025 11:30:54 -0500 (0:00:00.122) 0:00:26.733 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:30:54 -0500 (0:00:00.050) 0:00:26.783 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:30:54 -0500 (0:00:00.049) 0:00:26.833 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:30:54 -0500 (0:00:00.034) 0:00:26.867 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14 Saturday 18 January 2025 11:30:54 -0500 (0:00:00.036) 0:00:26.904 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20 Saturday 18 January 2025 11:30:54 -0500 (0:00:00.043) 0:00:26.947 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:30:54 -0500 (0:00:00.058) 0:00:27.005 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:30:54 -0500 (0:00:00.028) 0:00:27.034 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:30:54 -0500 (0:00:00.027) 0:00:27.061 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": 
"Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25 Saturday 18 January 2025 11:30:54 -0500 (0:00:00.029) 0:00:27.091 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41 Saturday 18 January 2025 11:30:54 -0500 (0:00:00.027) 0:00:27.118 ****** changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182 Saturday 18 January 2025 11:30:55 -0500 (0:00:00.557) 0:00:27.676 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189 Saturday 18 January 2025 11:30:55 -0500 (0:00:00.051) 0:00:27.727 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 18 January 2025 11:30:55 -0500 (0:00:00.151) 0:00:27.879 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "quadlet-demo.network", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Network]\nSubnet=192.168.30.0/24\nGateway=192.168.30.1\nLabel=app=wordpress", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 18 January 2025 11:30:55 -0500 (0:00:00.042) 0:00:27.922 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": 
true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 18 January 2025 11:30:55 -0500 (0:00:00.036) 0:00:27.958 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 18 January 2025 11:30:55 -0500 (0:00:00.030) 0:00:27.989 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo", "__podman_quadlet_type": "network", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 18 January 2025 11:30:55 -0500 (0:00:00.050) 0:00:28.040 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:30:56 -0500 (0:00:00.143) 0:00:28.183 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:30:56 -0500 (0:00:00.051) 0:00:28.234 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:30:56 -0500 (0:00:00.052) 0:00:28.287 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:30:56 -0500 (0:00:00.067) 0:00:28.354 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1737217697.4254181, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217689.5844104, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", 
"mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:30:56 -0500 (0:00:00.452) 0:00:28.806 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:30:56 -0500 (0:00:00.053) 0:00:28.860 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:30:56 -0500 (0:00:00.046) 0:00:28.906 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:30:56 -0500 (0:00:00.048) 0:00:28.954 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:30:56 -0500 (0:00:00.050) 0:00:29.004 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:30:56 -0500 (0:00:00.053) 0:00:29.058 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:30:56 -0500 (0:00:00.050) 0:00:29.109 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 
Saturday 18 January 2025 11:30:57 -0500 (0:00:00.053) 0:00:29.162 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 18 January 2025 11:30:57 -0500 (0:00:00.051) 0:00:29.213 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-demo-network.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 18 January 2025 11:30:57 -0500 (0:00:00.085) 0:00:29.299 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 18 January 2025 11:30:57 -0500 (0:00:00.054) 0:00:29.354 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 18 January 2025 11:30:57 -0500 (0:00:00.048) 0:00:29.402 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.network", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 18 January 2025 11:30:57 -0500 (0:00:00.122) 0:00:29.525 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 18 January 2025 11:30:57 -0500 (0:00:00.062) 0:00:29.587 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 18 January 2025 11:30:57 -0500 (0:00:00.044) 0:00:29.632 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 18 January 2025 11:30:57 -0500 (0:00:00.113) 0:00:29.746 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:30:57 -0500 (0:00:00.201) 0:00:29.947 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:30:57 -0500 (0:00:00.053) 0:00:30.000 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:30:57 -0500 (0:00:00.046) 0:00:30.047 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 18 January 2025 11:30:57 -0500 (0:00:00.049) 0:00:30.096 ****** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 18 January 2025 11:30:58 -0500 (0:00:00.045) 0:00:30.142 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 18 January 2025 11:30:58 -0500 (0:00:00.052) 0:00:30.195 ****** ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 18 January 2025 11:30:58 -0500 (0:00:00.440) 0:00:30.635 ****** changed: [managed-node2] => { "changed": true, "checksum": "e57c08d49aff4bae8daab138d913aeddaa8682a0", "dest": "/etc/containers/systemd/quadlet-demo.network", "gid": 0, "group": "root", "md5sum": "061f3cf318cbd8ab5794bb1173831fb8", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 74, "src": 
"/root/.ansible/tmp/ansible-tmp-1737217858.56248-14288-74493159750705/.source.network", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 18 January 2025 11:30:59 -0500 (0:00:00.841) 0:00:31.476 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 18 January 2025 11:30:59 -0500 (0:00:00.031) 0:00:31.507 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_copy_file is skipped", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 18 January 2025 11:30:59 -0500 (0:00:00.031) 0:00:31.539 ****** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 18 January 2025 11:31:00 -0500 (0:00:00.762) 0:00:32.302 ****** changed: [managed-node2] => { "changed": true, "name": "quadlet-demo-network.service", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "sysinit.target network-online.target systemd-journald.socket -.mount basic.target system.slice", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", 
"DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-demo-network.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698241536", "EffectiveMemoryMax": "3698241536", "EffectiveTasksMax": "22365", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.30.0/24 --gateway 192.168.30.1 --label app=wordpress systemd-quadlet-demo ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.30.0/24 --gateway 192.168.30.1 --label app=wordpress systemd-quadlet-demo ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo-network.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo-network.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": 
"auto", "MemoryAccounting": "yes", "MemoryAvailable": "3217375232", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo-network.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "system.slice sysinit.target -.mount", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo.network", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo-network", "SyslogLevel": "6", 
"SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 18 January 2025 11:31:00 -0500 (0:00:00.665) 0:00:32.967 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 18 January 2025 11:31:00 -0500 (0:00:00.034) 0:00:33.002 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "quadlet-demo-mysql.volume", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Volume]", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 18 January 2025 11:31:00 -0500 (0:00:00.072) 0:00:33.074 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 18 January 2025 11:31:00 -0500 (0:00:00.048) 0:00:33.123 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 18 January 2025 11:31:01 -0500 (0:00:00.034) 0:00:33.158 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo-mysql", "__podman_quadlet_type": "volume", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 18 January 2025 11:31:01 -0500 (0:00:00.049) 0:00:33.207 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] 
***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:31:01 -0500 (0:00:00.055) 0:00:33.262 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:31:01 -0500 (0:00:00.032) 0:00:33.295 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:31:01 -0500 (0:00:00.070) 0:00:33.365 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:31:01 -0500 (0:00:00.040) 0:00:33.406 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1737217697.4254181, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217689.5844104, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:31:01 -0500 (0:00:00.425) 0:00:33.832 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:31:01 -0500 (0:00:00.052) 0:00:33.884 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:31:01 -0500 (0:00:00.058) 0:00:33.942 ****** skipping: 
[managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:31:01 -0500 (0:00:00.051) 0:00:33.993 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:31:01 -0500 (0:00:00.055) 0:00:34.049 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:31:01 -0500 (0:00:00.066) 0:00:34.116 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:31:02 -0500 (0:00:00.075) 0:00:34.192 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:31:02 -0500 (0:00:00.056) 0:00:34.248 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 18 January 2025 11:31:02 -0500 (0:00:00.040) 0:00:34.288 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-demo-mysql-volume.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 18 January 2025 11:31:02 -0500 (0:00:00.055) 0:00:34.343 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 18 
January 2025 11:31:02 -0500 (0:00:00.039) 0:00:34.383 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 18 January 2025 11:31:02 -0500 (0:00:00.033) 0:00:34.416 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.volume", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 18 January 2025 11:31:02 -0500 (0:00:00.073) 0:00:34.490 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 18 January 2025 11:31:02 -0500 (0:00:00.035) 0:00:34.526 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 18 January 2025 11:31:02 -0500 (0:00:00.028) 0:00:34.554 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 18 January 2025 11:31:02 -0500 (0:00:00.065) 0:00:34.620 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:31:02 -0500 (0:00:00.081) 0:00:34.701 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:31:02 -0500 (0:00:00.032) 0:00:34.734 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:31:02 -0500 (0:00:00.077) 0:00:34.812 ****** skipping: [managed-node2] => { "changed": false, 
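
Every quadlet processed in this run arrives through __podman_quadlet_file_src, i.e. the files were handed to the role pre-written rather than as inline specs or templates (the "Fail if no quadlet spec is given" task skips for the same reason). Assuming the role's documented podman_quadlet_specs interface, which is an inference from the __podman_quadlet_file_src fact and not shown in this log, the driving playbook would carry something like:

    podman_quadlet_specs:                     # assumed role variable
      - file_src: quadlet-demo.network
      - file_src: quadlet-demo-mysql.volume
      # the remaining specs in this run are censored by no_log
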
"false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 18 January 2025 11:31:02 -0500 (0:00:00.034) 0:00:34.846 ****** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 18 January 2025 11:31:02 -0500 (0:00:00.028) 0:00:34.874 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 18 January 2025 11:31:02 -0500 (0:00:00.029) 0:00:34.904 ****** ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 34, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 18 January 2025 11:31:03 -0500 (0:00:00.396) 0:00:35.300 ****** changed: [managed-node2] => { "changed": true, "checksum": "585f8cbdf0ec73000f9227dcffbef71e9552ea4a", "dest": "/etc/containers/systemd/quadlet-demo-mysql.volume", "gid": 0, "group": "root", "md5sum": "5ddd03a022aeb4502d9bc8ce436b4233", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 9, "src": "/root/.ansible/tmp/ansible-tmp-1737217863.216999-14492-11702154848732/.source.volume", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 18 January 2025 11:31:03 -0500 (0:00:00.748) 0:00:36.048 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 18 January 2025 11:31:03 -0500 (0:00:00.051) 0:00:36.100 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_copy_file is skipped", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 18 January 2025 11:31:04 -0500 (0:00:00.046) 0:00:36.146 ****** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 18 January 2025 11:31:04 -0500 (0:00:00.766) 0:00:36.913 ****** changed: [managed-node2] => { "changed": true, "name": "quadlet-demo-mysql-volume.service", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "system.slice -.mount basic.target systemd-journald.socket sysinit.target network-online.target", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-demo-mysql-volume.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698241536", "EffectiveMemoryMax": "3698241536", "EffectiveTasksMax": "22365", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-demo-mysql ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-demo-mysql ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo-mysql-volume.service", "FreezerState": 
"running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo-mysql-volume.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3203649536", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo-mysql-volume.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", 
"ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "sysinit.target system.slice -.mount", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo-mysql.volume", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo-mysql-volume", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 18 January 2025 11:31:05 -0500 (0:00:00.618) 0:00:37.531 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 18 January 2025 11:31:05 -0500 (0:00:00.032) 0:00:37.563 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": 
"", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Container]\nImage=quay.io/linux-system-roles/mysql:5.6\nContainerName=quadlet-demo-mysql\nVolume=quadlet-demo-mysql.volume:/var/lib/mysql\nVolume=/tmp/quadlet_demo:/var/lib/quadlet_demo:Z\nNetwork=quadlet-demo.network\nSecret=mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD\nHealthCmd=/bin/true\nHealthOnFailure=kill\n", "__podman_quadlet_template_src": "quadlet-demo-mysql.container.j2" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 18 January 2025 11:31:05 -0500 (0:00:00.116) 0:00:37.679 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 18 January 2025 11:31:05 -0500 (0:00:00.062) 0:00:37.742 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_str", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 18 January 2025 11:31:05 -0500 (0:00:00.038) 0:00:37.781 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo-mysql", "__podman_quadlet_type": "container", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 18 January 2025 11:31:05 -0500 (0:00:00.054) 0:00:37.835 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:31:05 -0500 (0:00:00.066) 0:00:37.902 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:31:05 -0500 (0:00:00.034) 0:00:37.936 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:31:05 -0500 (0:00:00.032) 0:00:37.969 ****** ok: [managed-node2] => { 
"ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:31:05 -0500 (0:00:00.041) 0:00:38.010 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1737217697.4254181, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217689.5844104, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:31:06 -0500 (0:00:00.402) 0:00:38.412 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:31:06 -0500 (0:00:00.031) 0:00:38.444 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:31:06 -0500 (0:00:00.069) 0:00:38.513 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:31:06 -0500 (0:00:00.032) 0:00:38.546 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:31:06 -0500 (0:00:00.031) 0:00:38.577 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:31:06 -0500 (0:00:00.035) 0:00:38.613 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:31:06 -0500 (0:00:00.041) 0:00:38.655 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:31:06 -0500 (0:00:00.051) 0:00:38.706 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 18 January 2025 11:31:06 -0500 (0:00:00.036) 0:00:38.743 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/linux-system-roles/mysql:5.6" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-demo-mysql.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 18 January 2025 11:31:06 -0500 (0:00:00.057) 0:00:38.800 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 18 January 2025 11:31:06 -0500 (0:00:00.040) 0:00:38.841 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 18 January 2025 11:31:06 -0500 (0:00:00.035) 0:00:38.876 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [ "quay.io/linux-system-roles/mysql:5.6" ], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.container", "__podman_volumes": [ "/tmp/quadlet_demo" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 18 January 2025 11:31:06 -0500 (0:00:00.083) 0:00:38.959 ****** ok: [managed-node2] => { "censored": "the output 
has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 18 January 2025 11:31:06 -0500 (0:00:00.036) 0:00:38.996 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 18 January 2025 11:31:06 -0500 (0:00:00.028) 0:00:39.025 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 18 January 2025 11:31:06 -0500 (0:00:00.063) 0:00:39.089 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:31:07 -0500 (0:00:00.050) 0:00:39.139 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:31:07 -0500 (0:00:00.029) 0:00:39.169 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:31:07 -0500 (0:00:00.027) 0:00:39.196 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 18 January 2025 11:31:07 -0500 (0:00:00.028) 0:00:39.225 ****** changed: [managed-node2] => (item=/tmp/quadlet_demo) => { "ansible_loop_var": "item", "changed": true, "gid": 0, "group": "root", "item": "/tmp/quadlet_demo", "mode": "0777", "owner": "root", "path": "/tmp/quadlet_demo", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 18 January 2025 11:31:07 -0500 (0:00:00.427) 0:00:39.652 ****** changed: [managed-node2] => 
(item=None) => { "attempts": 1, "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 18 January 2025 11:31:17 -0500 (0:00:10.399) 0:00:50.052 ****** ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 67, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 18 January 2025 11:31:18 -0500 (0:00:00.440) 0:00:50.492 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 18 January 2025 11:31:18 -0500 (0:00:00.042) 0:00:50.535 ****** changed: [managed-node2] => { "changed": true, "checksum": "ca62b2ad3cc9afb5b5371ebbf797b9bc4fd7edd4", "dest": "/etc/containers/systemd/quadlet-demo-mysql.container", "gid": 0, "group": "root", "md5sum": "341b473056d2a5dfa35970b0d2e23a5d", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 363, "src": "/root/.ansible/tmp/ansible-tmp-1737217878.4555283-14896-65681884352884/.source.container", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 18 January 2025 11:31:19 -0500 (0:00:00.712) 0:00:51.247 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_copy_content is skipped", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 18 January 2025 11:31:19 -0500 (0:00:00.031) 0:00:51.279 ****** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 18 January 2025 11:31:19 -0500 (0:00:00.775) 0:00:52.054 ****** changed: [managed-node2] => { "changed": true, "name": "quadlet-demo-mysql.service", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "quadlet-demo-network.service sysinit.target system.slice network-online.target basic.target systemd-journald.socket -.mount quadlet-demo-mysql-volume.service 
tmp.mount", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "multi-user.target shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "yes", "DelegateControllers": "cpu cpuset io memory pids", "Description": "quadlet-demo-mysql.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698241536", "EffectiveMemoryMax": "3698241536", "EffectiveTasksMax": "22365", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-demo-mysql --cidfile=/run/quadlet-demo-mysql.cid --replace --rm --cgroups=split --network systemd-quadlet-demo --sdnotify=conmon -d -v systemd-quadlet-demo-mysql:/var/lib/mysql -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z --secret mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD --health-cmd /bin/true --health-on-failure kill quay.io/linux-system-roles/mysql:5.6 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-demo-mysql --cidfile=/run/quadlet-demo-mysql.cid --replace --rm --cgroups=split --network systemd-quadlet-demo --sdnotify=conmon -d -v systemd-quadlet-demo-mysql:/var/lib/mysql -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z --secret mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD --health-cmd /bin/true --health-on-failure kill quay.io/linux-system-roles/mysql:5.6 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; 
argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; flags=ignore-failure ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo-mysql.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo-mysql.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3051446272", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", 
"MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo-mysql.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "continue", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice quadlet-demo-network.service sysinit.target -.mount quadlet-demo-mysql-volume.service", "RequiresMountsFor": "/run/containers /tmp/quadlet_demo", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo-mysql.container", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo-mysql", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", 
"TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 18 January 2025 11:31:20 -0500 (0:00:00.927) 0:00:52.981 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 18 January 2025 11:31:20 -0500 (0:00:00.034) 0:00:53.016 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "envoy-proxy-configmap.yml", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "---\napiVersion: v1\nkind: ConfigMap\nmetadata:\n name: envoy-proxy-config\ndata:\n envoy.yaml: |\n admin:\n address:\n socket_address:\n address: 0.0.0.0\n port_value: 9901\n\n static_resources:\n listeners:\n - name: listener_0\n address:\n socket_address:\n address: 0.0.0.0\n port_value: 8080\n filter_chains:\n - filters:\n - name: envoy.filters.network.http_connection_manager\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager\n stat_prefix: ingress_http\n codec_type: AUTO\n route_config:\n name: local_route\n virtual_hosts:\n - name: local_service\n domains: [\"*\"]\n routes:\n - match:\n prefix: \"/\"\n route:\n cluster: backend\n http_filters:\n - name: envoy.filters.http.router\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.filters.http.router.v3.Router\n transport_socket:\n name: envoy.transport_sockets.tls\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.transport_sockets.tls.v3.DownstreamTlsContext\n common_tls_context:\n tls_certificates:\n - certificate_chain:\n filename: /etc/envoy-certificates/certificate.pem\n private_key:\n filename: /etc/envoy-certificates/certificate.key\n clusters:\n - name: backend\n connect_timeout: 5s\n type: STATIC\n dns_refresh_rate: 1800s\n lb_policy: ROUND_ROBIN\n load_assignment:\n cluster_name: backend\n endpoints:\n - lb_endpoints:\n - endpoint:\n address:\n socket_address:\n address: 127.0.0.1\n port_value: 80", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 18 January 2025 11:31:20 -0500 (0:00:00.064) 0:00:53.081 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 18 January 2025 11:31:21 -0500 
(0:00:00.049) 0:00:53.131 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 18 January 2025 11:31:21 -0500 (0:00:00.035) 0:00:53.166 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "envoy-proxy-configmap", "__podman_quadlet_type": "yml", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 18 January 2025 11:31:21 -0500 (0:00:00.055) 0:00:53.222 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:31:21 -0500 (0:00:00.063) 0:00:53.285 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:31:21 -0500 (0:00:00.038) 0:00:53.324 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:31:21 -0500 (0:00:00.034) 0:00:53.359 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:31:21 -0500 (0:00:00.042) 0:00:53.401 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1737217697.4254181, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217689.5844104, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with 
getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:31:21 -0500 (0:00:00.415) 0:00:53.817 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:31:21 -0500 (0:00:00.036) 0:00:53.853 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:31:21 -0500 (0:00:00.041) 0:00:53.895 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:31:21 -0500 (0:00:00.061) 0:00:53.956 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:31:21 -0500 (0:00:00.035) 0:00:53.991 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:31:21 -0500 (0:00:00.079) 0:00:54.071 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:31:21 -0500 (0:00:00.031) 0:00:54.103 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:31:22 -0500 (0:00:00.032) 0:00:54.136 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 18 January 2025 11:31:22 -0500 (0:00:00.032) 0:00:54.168 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 18 January 2025 11:31:22 -0500 (0:00:00.053) 0:00:54.222 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 18 January 2025 11:31:22 -0500 (0:00:00.033) 0:00:54.255 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 18 January 2025 11:31:22 -0500 (0:00:00.032) 0:00:54.287 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/envoy-proxy-configmap.yml", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 18 January 2025 11:31:22 -0500 (0:00:00.098) 0:00:54.386 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 18 January 2025 11:31:22 -0500 (0:00:00.061) 0:00:54.447 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 18 January 2025 11:31:22 -0500 (0:00:00.052) 0:00:54.499 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 18 January 2025 11:31:22 -0500 (0:00:00.088) 0:00:54.587 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] 
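
Every "Manage linger", "Check with getsubids", and subuid/subgid task in this run short-circuits because __podman_user is root. For a rootless user, the role would verify subordinate ID ranges and manage lingering so the user's units outlive the login session; roughly the equivalent manual checks (hypothetical user name, not taken from this run):

    # Subordinate UID/GID ranges, via the getsubids binary the role stat'ed above
    getsubids someuser        # prints e.g. "0: someuser 524288 65536"
    getsubids -g someuser     # the same lookup for subordinate GIDs
    # Keep the user's systemd instance (and its podman units) alive after logout
    loginctl enable-linger someuser
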
************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:31:22 -0500 (0:00:00.058) 0:00:54.646 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:31:22 -0500 (0:00:00.032) 0:00:54.678 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:31:22 -0500 (0:00:00.034) 0:00:54.713 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 18 January 2025 11:31:22 -0500 (0:00:00.032) 0:00:54.745 ****** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 18 January 2025 11:31:22 -0500 (0:00:00.028) 0:00:54.774 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 18 January 2025 11:31:22 -0500 (0:00:00.029) 0:00:54.803 ****** ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 103, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 18 January 2025 11:31:23 -0500 (0:00:00.410) 0:00:55.214 ****** changed: [managed-node2] => { "changed": true, "checksum": "d681c7d56f912150d041873e880818b22a90c188", "dest": "/etc/containers/systemd/envoy-proxy-configmap.yml", "gid": 0, "group": "root", "md5sum": "aec75d972c231aac004e1338934544cf", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 2102, "src": "/root/.ansible/tmp/ansible-tmp-1737217883.142967-15089-251893677799058/.source.yml", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 18 January 2025 11:31:23 -0500 (0:00:00.733) 0:00:55.948 ****** 
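
The "Ensure quadlet file is copied" result above reports the digest of the deployed file in its "checksum" field; Ansible's checksum is SHA-1, so a manual spot-check on the managed node could look like this (illustrative, not part of the role):

    sha1sum /etc/containers/systemd/envoy-proxy-configmap.yml
    # d681c7d56f912150d041873e880818b22a90c188  /etc/containers/systemd/envoy-proxy-configmap.yml
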
skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 18 January 2025 11:31:23 -0500 (0:00:00.089) 0:00:56.038 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_copy_file is skipped", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 18 January 2025 11:31:23 -0500 (0:00:00.032) 0:00:56.070 ****** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 18 January 2025 11:31:24 -0500 (0:00:00.765) 0:00:56.836 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_service_name | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 18 January 2025 11:31:24 -0500 (0:00:00.053) 0:00:56.890 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_service_name | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 18 January 2025 11:31:24 -0500 (0:00:00.042) 0:00:56.932 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "---\napiVersion: v1\nkind: PersistentVolumeClaim\nmetadata:\n name: wp-pv-claim\n labels:\n app: wordpress\nspec:\n accessModes:\n - ReadWriteOnce\n resources:\n requests:\n storage: 20Gi\n---\napiVersion: v1\nkind: Pod\nmetadata:\n name: quadlet-demo\nspec:\n containers:\n - name: wordpress\n image: quay.io/linux-system-roles/wordpress:4.8-apache\n env:\n - name: WORDPRESS_DB_HOST\n value: quadlet-demo-mysql\n - name: WORDPRESS_DB_PASSWORD\n valueFrom:\n secretKeyRef:\n name: mysql-root-password-kube\n key: password\n volumeMounts:\n - name: wordpress-persistent-storage\n mountPath: /var/www/html\n resources:\n requests:\n memory: \"64Mi\"\n cpu: \"250m\"\n limits:\n memory: \"128Mi\"\n cpu: \"500m\"\n - name: envoy\n image: quay.io/linux-system-roles/envoyproxy:v1.25.0\n volumeMounts:\n - name: config-volume\n mountPath: /etc/envoy\n - name: certificates\n mountPath: /etc/envoy-certificates\n env:\n - name: ENVOY_UID\n value: \"0\"\n resources:\n requests:\n memory: \"64Mi\"\n cpu: \"250m\"\n limits:\n memory: \"128Mi\"\n cpu: \"500m\"\n volumes:\n - name: config-volume\n configMap:\n name: envoy-proxy-config\n - name: certificates\n secret:\n secretName: envoy-certificates\n - name: wordpress-persistent-storage\n persistentVolumeClaim:\n claimName: wp-pv-claim\n - name: www # not used - for testing 
hostpath\n hostPath:\n path: /tmp/httpd3\n - name: create # not used - for testing hostpath\n hostPath:\n path: /tmp/httpd3-create\n", "__podman_quadlet_template_src": "quadlet-demo.yml.j2" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 18 January 2025 11:31:24 -0500 (0:00:00.092) 0:00:57.025 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 18 January 2025 11:31:24 -0500 (0:00:00.063) 0:00:57.089 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_str", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 18 January 2025 11:31:25 -0500 (0:00:00.042) 0:00:57.131 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo", "__podman_quadlet_type": "yml", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 18 January 2025 11:31:25 -0500 (0:00:00.047) 0:00:57.178 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:31:25 -0500 (0:00:00.056) 0:00:57.235 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:31:25 -0500 (0:00:00.033) 0:00:57.268 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:31:25 -0500 (0:00:00.035) 0:00:57.303 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:31:25 -0500 (0:00:00.042) 
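
Like the ConfigMap, the __podman_quadlet_str in the "part 0" result above (rendered from quadlet-demo.yml.j2) is a flattened Kubernetes manifest: a 20Gi PersistentVolumeClaim plus a two-container Pod running wordpress with an envoy sidecar. Reconstructed below; as before, the log collapses whitespace, so indentation is inferred rather than verbatim:

    ---
    apiVersion: v1
    kind: PersistentVolumeClaim
    metadata:
      name: wp-pv-claim
      labels:
        app: wordpress
    spec:
      accessModes:
      - ReadWriteOnce
      resources:
        requests:
          storage: 20Gi
    ---
    apiVersion: v1
    kind: Pod
    metadata:
      name: quadlet-demo
    spec:
      containers:
      - name: wordpress
        image: quay.io/linux-system-roles/wordpress:4.8-apache
        env:
        - name: WORDPRESS_DB_HOST
          value: quadlet-demo-mysql
        - name: WORDPRESS_DB_PASSWORD
          valueFrom:
            secretKeyRef:
              name: mysql-root-password-kube
              key: password
        volumeMounts:
        - name: wordpress-persistent-storage
          mountPath: /var/www/html
        resources:
          requests:
            memory: "64Mi"
            cpu: "250m"
          limits:
            memory: "128Mi"
            cpu: "500m"
      - name: envoy
        image: quay.io/linux-system-roles/envoyproxy:v1.25.0
        volumeMounts:
        - name: config-volume
          mountPath: /etc/envoy
        - name: certificates
          mountPath: /etc/envoy-certificates
        env:
        - name: ENVOY_UID
          value: "0"
        resources:
          requests:
            memory: "64Mi"
            cpu: "250m"
          limits:
            memory: "128Mi"
            cpu: "500m"
      volumes:
      - name: config-volume
        configMap:
          name: envoy-proxy-config
      - name: certificates
        secret:
          secretName: envoy-certificates
      - name: wordpress-persistent-storage
        persistentVolumeClaim:
          claimName: wp-pv-claim
      - name: www # not used - for testing hostpath
        hostPath:
          path: /tmp/httpd3
      - name: create # not used - for testing hostpath
        hostPath:
          path: /tmp/httpd3-create
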
0:00:57.345 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1737217697.4254181, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217689.5844104, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:31:25 -0500 (0:00:00.408) 0:00:57.754 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:31:25 -0500 (0:00:00.031) 0:00:57.785 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:31:25 -0500 (0:00:00.032) 0:00:57.817 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:31:25 -0500 (0:00:00.030) 0:00:57.848 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:31:25 -0500 (0:00:00.031) 0:00:57.880 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:31:25 -0500 (0:00:00.033) 0:00:57.913 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : 
Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:31:25 -0500 (0:00:00.050) 0:00:57.963 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:31:25 -0500 (0:00:00.044) 0:00:58.008 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 18 January 2025 11:31:25 -0500 (0:00:00.087) 0:00:58.095 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 18 January 2025 11:31:26 -0500 (0:00:00.085) 0:00:58.181 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 18 January 2025 11:31:26 -0500 (0:00:00.038) 0:00:58.220 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 18 January 2025 11:31:26 -0500 (0:00:00.030) 0:00:58.250 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.yml", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 18 January 2025 11:31:26 -0500 (0:00:00.074) 0:00:58.325 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 18 January 2025 11:31:26 -0500 (0:00:00.036) 0:00:58.362 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was 
False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 18 January 2025 11:31:26 -0500 (0:00:00.029) 0:00:58.391 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 18 January 2025 11:31:26 -0500 (0:00:00.063) 0:00:58.454 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:31:26 -0500 (0:00:00.067) 0:00:58.522 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:31:26 -0500 (0:00:00.047) 0:00:58.570 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:31:26 -0500 (0:00:00.035) 0:00:58.606 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 18 January 2025 11:31:26 -0500 (0:00:00.031) 0:00:58.637 ****** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 18 January 2025 11:31:26 -0500 (0:00:00.029) 0:00:58.666 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 18 January 2025 11:31:26 -0500 (0:00:00.035) 0:00:58.702 ****** ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 136, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 18 January 2025 11:31:26 -0500 (0:00:00.392) 0:00:59.094 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 18 January 2025 11:31:26 -0500 (0:00:00.029) 0:00:59.123 ****** changed: [managed-node2] => { "changed": true, "checksum": "998dccde0483b1654327a46ddd89cbaa47650370", "dest": "/etc/containers/systemd/quadlet-demo.yml", "gid": 0, "group": "root", "md5sum": "fd890594adfc24339cb9cdc5e7b19a66", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 1605, "src": "/root/.ansible/tmp/ansible-tmp-1737217887.039937-15252-234748873817093/.source.yml", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 18 January 2025 11:31:27 -0500 (0:00:00.705) 0:00:59.828 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_copy_content is skipped", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 18 January 2025 11:31:27 -0500 (0:00:00.027) 0:00:59.856 ****** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 18 January 2025 11:31:28 -0500 (0:00:00.756) 0:01:00.613 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_service_name | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 18 January 2025 11:31:28 -0500 (0:00:00.032) 0:01:00.646 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_service_name | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 18 January 2025 11:31:28 -0500 (0:00:00.073) 0:01:00.720 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "quadlet-demo.kube", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Unit]\nRequires=quadlet-demo-mysql.service\nAfter=quadlet-demo-mysql.service\n\n[Kube]\n# Point to the yaml file in the same directory\nYaml=quadlet-demo.yml\n# Use the quadlet-demo network\nNetwork=quadlet-demo.network\n# Publish the envoy proxy data port\nPublishPort=8000:8080\n# Publish the envoy proxy admin 
port\nPublishPort=9000:9901\n# Use the envoy proxy config map in the same directory\nConfigMap=envoy-proxy-configmap.yml", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 18 January 2025 11:31:28 -0500 (0:00:00.042) 0:01:00.763 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 18 January 2025 11:31:28 -0500 (0:00:00.039) 0:01:00.802 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 18 January 2025 11:31:28 -0500 (0:00:00.031) 0:01:00.833 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo", "__podman_quadlet_type": "kube", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 18 January 2025 11:31:28 -0500 (0:00:00.046) 0:01:00.880 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:31:28 -0500 (0:00:00.054) 0:01:00.934 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:31:28 -0500 (0:00:00.032) 0:01:00.967 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:31:28 -0500 (0:00:00.033) 0:01:01.001 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:31:28 -0500 (0:00:00.040) 0:01:01.042 ****** ok: 
[managed-node2] => { "changed": false, "stat": { "atime": 1737217697.4254181, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217689.5844104, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:31:29 -0500 (0:00:00.391) 0:01:01.433 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:31:29 -0500 (0:00:00.032) 0:01:01.465 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:31:29 -0500 (0:00:00.040) 0:01:01.505 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:31:29 -0500 (0:00:00.040) 0:01:01.546 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:31:29 -0500 (0:00:00.054) 0:01:01.601 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:31:29 -0500 (0:00:00.056) 0:01:01.657 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in 
subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:31:29 -0500 (0:00:00.056) 0:01:01.714 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:31:29 -0500 (0:00:00.052) 0:01:01.767 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 18 January 2025 11:31:29 -0500 (0:00:00.055) 0:01:01.822 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": [ "quadlet-demo.yml" ], "__podman_service_name": "quadlet-demo.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 18 January 2025 11:31:29 -0500 (0:00:00.090) 0:01:01.912 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 18 January 2025 11:31:29 -0500 (0:00:00.051) 0:01:01.964 ****** ok: [managed-node2] => { "changed": false, "content": 
"LS0tCmFwaVZlcnNpb246IHYxCmtpbmQ6IFBlcnNpc3RlbnRWb2x1bWVDbGFpbQptZXRhZGF0YToKICBuYW1lOiB3cC1wdi1jbGFpbQogIGxhYmVsczoKICAgIGFwcDogd29yZHByZXNzCnNwZWM6CiAgYWNjZXNzTW9kZXM6CiAgLSBSZWFkV3JpdGVPbmNlCiAgcmVzb3VyY2VzOgogICAgcmVxdWVzdHM6CiAgICAgIHN0b3JhZ2U6IDIwR2kKLS0tCmFwaVZlcnNpb246IHYxCmtpbmQ6IFBvZAptZXRhZGF0YToKICBuYW1lOiBxdWFkbGV0LWRlbW8Kc3BlYzoKICBjb250YWluZXJzOgogIC0gbmFtZTogd29yZHByZXNzCiAgICBpbWFnZTogcXVheS5pby9saW51eC1zeXN0ZW0tcm9sZXMvd29yZHByZXNzOjQuOC1hcGFjaGUKICAgIGVudjoKICAgIC0gbmFtZTogV09SRFBSRVNTX0RCX0hPU1QKICAgICAgdmFsdWU6IHF1YWRsZXQtZGVtby1teXNxbAogICAgLSBuYW1lOiBXT1JEUFJFU1NfREJfUEFTU1dPUkQKICAgICAgdmFsdWVGcm9tOgogICAgICAgIHNlY3JldEtleVJlZjoKICAgICAgICAgIG5hbWU6IG15c3FsLXJvb3QtcGFzc3dvcmQta3ViZQogICAgICAgICAga2V5OiBwYXNzd29yZAogICAgdm9sdW1lTW91bnRzOgogICAgLSBuYW1lOiB3b3JkcHJlc3MtcGVyc2lzdGVudC1zdG9yYWdlCiAgICAgIG1vdW50UGF0aDogL3Zhci93d3cvaHRtbAogICAgcmVzb3VyY2VzOgogICAgICByZXF1ZXN0czoKICAgICAgICBtZW1vcnk6ICI2NE1pIgogICAgICAgIGNwdTogIjI1MG0iCiAgICAgIGxpbWl0czoKICAgICAgICBtZW1vcnk6ICIxMjhNaSIKICAgICAgICBjcHU6ICI1MDBtIgogIC0gbmFtZTogZW52b3kKICAgIGltYWdlOiBxdWF5LmlvL2xpbnV4LXN5c3RlbS1yb2xlcy9lbnZveXByb3h5OnYxLjI1LjAKICAgIHZvbHVtZU1vdW50czoKICAgIC0gbmFtZTogY29uZmlnLXZvbHVtZQogICAgICBtb3VudFBhdGg6IC9ldGMvZW52b3kKICAgIC0gbmFtZTogY2VydGlmaWNhdGVzCiAgICAgIG1vdW50UGF0aDogL2V0Yy9lbnZveS1jZXJ0aWZpY2F0ZXMKICAgIGVudjoKICAgIC0gbmFtZTogRU5WT1lfVUlECiAgICAgIHZhbHVlOiAiMCIKICAgIHJlc291cmNlczoKICAgICAgcmVxdWVzdHM6CiAgICAgICAgbWVtb3J5OiAiNjRNaSIKICAgICAgICBjcHU6ICIyNTBtIgogICAgICBsaW1pdHM6CiAgICAgICAgbWVtb3J5OiAiMTI4TWkiCiAgICAgICAgY3B1OiAiNTAwbSIKICB2b2x1bWVzOgogIC0gbmFtZTogY29uZmlnLXZvbHVtZQogICAgY29uZmlnTWFwOgogICAgICBuYW1lOiBlbnZveS1wcm94eS1jb25maWcKICAtIG5hbWU6IGNlcnRpZmljYXRlcwogICAgc2VjcmV0OgogICAgICBzZWNyZXROYW1lOiBlbnZveS1jZXJ0aWZpY2F0ZXMKICAtIG5hbWU6IHdvcmRwcmVzcy1wZXJzaXN0ZW50LXN0b3JhZ2UKICAgIHBlcnNpc3RlbnRWb2x1bWVDbGFpbToKICAgICAgY2xhaW1OYW1lOiB3cC1wdi1jbGFpbQogIC0gbmFtZTogd3d3ICAjIG5vdCB1c2VkIC0gZm9yIHRlc3RpbmcgaG9zdHBhdGgKICAgIGhvc3RQYXRoOgogICAgICBwYXRoOiAvdG1wL2h0dHBkMwogIC0gbmFtZTogY3JlYXRlICAjIG5vdCB1c2VkIC0gZm9yIHRlc3RpbmcgaG9zdHBhdGgKICAgIGhvc3RQYXRoOgogICAgICBwYXRoOiAvdG1wL2h0dHBkMy1jcmVhdGUK", "encoding": "base64", "source": "/etc/containers/systemd/quadlet-demo.yml" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 18 January 2025 11:31:30 -0500 (0:00:00.412) 0:01:02.376 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [ "quay.io/linux-system-roles/wordpress:4.8-apache", "quay.io/linux-system-roles/envoyproxy:v1.25.0" ], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.kube", "__podman_volumes": [ "/tmp/httpd3", "/tmp/httpd3-create" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 18 January 2025 11:31:30 -0500 (0:00:00.139) 0:01:02.515 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 18 January 2025 11:31:30 -0500 (0:00:00.042) 0:01:02.557 ****** skipping: [managed-node2] 
=> { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 18 January 2025 11:31:30 -0500 (0:00:00.045) 0:01:02.603 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 18 January 2025 11:31:30 -0500 (0:00:00.087) 0:01:02.691 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:31:30 -0500 (0:00:00.059) 0:01:02.751 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:31:30 -0500 (0:00:00.032) 0:01:02.783 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:31:30 -0500 (0:00:00.032) 0:01:02.816 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 18 January 2025 11:31:30 -0500 (0:00:00.029) 0:01:02.845 ****** changed: [managed-node2] => (item=/tmp/httpd3) => { "ansible_loop_var": "item", "changed": true, "gid": 0, "group": "root", "item": "/tmp/httpd3", "mode": "0755", "owner": "root", "path": "/tmp/httpd3", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 6, "state": "directory", "uid": 0 } changed: [managed-node2] => (item=/tmp/httpd3-create) => { "ansible_loop_var": "item", "changed": true, "gid": 0, "group": "root", "item": "/tmp/httpd3-create", "mode": "0755", "owner": "root", "path": "/tmp/httpd3-create", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 18 January 2025 11:31:31 -0500 (0:00:00.811) 0:01:03.657 ****** changed: [managed-node2] => (item=None) => { "attempts": 1, "censored": "the output has been hidden due to the fact that 'no_log: true' was 
specified for this result", "changed": true } changed: [managed-node2] => (item=None) => { "attempts": 1, "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 18 January 2025 11:31:49 -0500 (0:00:18.384) 0:01:22.042 ****** ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 160, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 18 January 2025 11:31:50 -0500 (0:00:00.417) 0:01:22.459 ****** changed: [managed-node2] => { "changed": true, "checksum": "7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7", "dest": "/etc/containers/systemd/quadlet-demo.kube", "gid": 0, "group": "root", "md5sum": "da53c88f92b68b0487aa209f795b6bb3", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 456, "src": "/root/.ansible/tmp/ansible-tmp-1737217910.379617-15884-151214228727689/.source.kube", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 18 January 2025 11:31:51 -0500 (0:00:00.741) 0:01:23.201 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 18 January 2025 11:31:51 -0500 (0:00:00.046) 0:01:23.248 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_copy_file is skipped", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 18 January 2025 11:31:51 -0500 (0:00:00.035) 0:01:23.283 ****** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 18 January 2025 11:31:51 -0500 (0:00:00.754) 0:01:24.038 ****** changed: [managed-node2] => { "changed": true, "name": "quadlet-demo.service", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "systemd-journald.socket sysinit.target basic.target quadlet-demo-mysql.service system.slice network-online.target 
-.mount quadlet-demo-network.service", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "multi-user.target shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-demo.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698241536", "EffectiveMemoryMax": "3698241536", "EffectiveTasksMax": "22365", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": 
"root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3044499456", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", 
"Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "quadlet-demo-mysql.service -.mount quadlet-demo-network.service system.slice sysinit.target", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo.kube", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 18 January 2025 11:31:55 -0500 (0:00:04.076) 0:01:28.115 ****** skipping: [managed-node2] => { "changed": 
false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196 Saturday 18 January 2025 11:31:56 -0500 (0:00:00.032) 0:01:28.147 ****** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202 Saturday 18 January 2025 11:31:56 -0500 (0:00:00.028) 0:01:28.176 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211 Saturday 18 January 2025 11:31:56 -0500 (0:00:00.027) 0:01:28.203 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Check quadlet files] ***************************************************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:96 Saturday 18 January 2025 11:31:56 -0500 (0:00:00.098) 0:01:28.301 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "ls", "-alrtF", "/etc/containers/systemd" ], "delta": "0:00:00.004596", "end": "2025-01-18 11:31:56.527538", "rc": 0, "start": "2025-01-18 11:31:56.522942" } STDOUT: total 24 drwxr-xr-x. 9 root root 178 Jan 18 11:28 ../ -rw-r--r--. 1 root root 74 Jan 18 11:30 quadlet-demo.network -rw-r--r--. 1 root root 9 Jan 18 11:31 quadlet-demo-mysql.volume -rw-r--r--. 1 root root 363 Jan 18 11:31 quadlet-demo-mysql.container -rw-r--r--. 1 root root 2102 Jan 18 11:31 envoy-proxy-configmap.yml -rw-r--r--. 1 root root 1605 Jan 18 11:31 quadlet-demo.yml -rw-r--r--. 1 root root 456 Jan 18 11:31 quadlet-demo.kube drwxr-xr-x. 
2 root root 185 Jan 18 11:31 ./ TASK [Check containers] ******************************************************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:100 Saturday 18 January 2025 11:31:56 -0500 (0:00:00.443) 0:01:28.744 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "-a" ], "delta": "0:00:00.035053", "end": "2025-01-18 11:31:56.983533", "failed_when_result": false, "rc": 0, "start": "2025-01-18 11:31:56.948480" } STDOUT: CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES TASK [Check volumes] *********************************************************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:105 Saturday 18 January 2025 11:31:57 -0500 (0:00:00.444) 0:01:29.189 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls" ], "delta": "0:00:00.033926", "end": "2025-01-18 11:31:57.401874", "failed_when_result": false, "rc": 0, "start": "2025-01-18 11:31:57.367948" } STDOUT: DRIVER VOLUME NAME local systemd-quadlet-demo-mysql local wp-pv-claim local envoy-proxy-config local envoy-certificates TASK [Check pods] ************************************************************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:110 Saturday 18 January 2025 11:31:57 -0500 (0:00:00.411) 0:01:29.601 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "pod", "ps", "--ctr-ids", "--ctr-names", "--ctr-status" ], "delta": "0:00:00.033746", "end": "2025-01-18 11:31:57.823120", "failed_when_result": false, "rc": 0, "start": "2025-01-18 11:31:57.789374" } STDOUT: POD ID NAME STATUS CREATED INFRA ID IDS NAMES STATUS TASK [Check systemd] *********************************************************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:115 Saturday 18 January 2025 11:31:57 -0500 (0:00:00.419) 0:01:30.020 ****** ok: [managed-node2] => { "changed": false, "cmd": "set -euo pipefail; systemctl list-units | grep quadlet", "delta": "0:00:00.014145", "end": "2025-01-18 11:31:58.223340", "failed_when_result": false, "rc": 0, "start": "2025-01-18 11:31:58.209195" } STDOUT: quadlet-demo-mysql-volume.service loaded active exited quadlet-demo-mysql-volume.service ● quadlet-demo-mysql.service loaded failed failed quadlet-demo-mysql.service quadlet-demo-network.service loaded active exited quadlet-demo-network.service ● quadlet-demo.service loaded failed failed quadlet-demo.service TASK [Check web] *************************************************************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:121 Saturday 18 January 2025 11:31:58 -0500 (0:00:00.401) 0:01:30.422 ****** FAILED - RETRYING: [managed-node2]: Check web (6 retries left). FAILED - RETRYING: [managed-node2]: Check web (5 retries left). FAILED - RETRYING: [managed-node2]: Check web (4 retries left). FAILED - RETRYING: [managed-node2]: Check web (3 retries left). FAILED - RETRYING: [managed-node2]: Check web (2 retries left). FAILED - RETRYING: [managed-node2]: Check web (1 retries left). fatal: [managed-node2]: FAILED! 
=> { "attempts": 6, "changed": false, "dest": "/run/out", "elapsed": 0, "url": "https://localhost:8000" } MSG: Request failed: TASK [Dump journal] ************************************************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:142 Saturday 18 January 2025 11:32:31 -0500 (0:00:33.274) 0:02:03.696 ****** fatal: [managed-node2]: FAILED! => { "changed": false, "cmd": [ "journalctl", "-ex" ], "delta": "0:00:00.030573", "end": "2025-01-18 11:32:31.906512", "failed_when_result": true, "rc": 0, "start": "2025-01-18 11:32:31.875939" } STDOUT: Jan 18 11:23:50 ip-10-31-15-213.us-east-1.aws.redhat.com irqbalance[655]: Cannot change IRQ 58 affinity: Permission denied Jan 18 11:23:50 ip-10-31-15-213.us-east-1.aws.redhat.com irqbalance[655]: IRQ 58 affinity is now unmanaged Jan 18 11:23:50 ip-10-31-15-213.us-east-1.aws.redhat.com irqbalance[655]: Cannot change IRQ 59 affinity: Permission denied Jan 18 11:23:50 ip-10-31-15-213.us-east-1.aws.redhat.com irqbalance[655]: IRQ 59 affinity is now unmanaged Jan 18 11:23:53 ip-10-31-15-213.us-east-1.aws.redhat.com kernel: block xvda: the capability attribute has been deprecated. Jan 18 11:23:54 ip-10-31-15-213.us-east-1.aws.redhat.com kdumpctl[894]: kdump: Rebuilding /boot/initramfs-6.12.0-38.el10.x86_64kdump.img Jan 18 11:23:54 ip-10-31-15-213.us-east-1.aws.redhat.com chronyd[670]: Selected source 199.195.248.88 (2.centos.pool.ntp.org) Jan 18 11:23:54 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1403]: dracut-103-1.el10 Jan 18 11:23:54 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1406]: Executing: /usr/bin/dracut --list-modules Jan 18 11:23:54 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1477]: dracut-103-1.el10 Jan 18 11:23:54 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Executing: /usr/bin/dracut --add kdumpbase --quiet --hostonly --hostonly-cmdline --hostonly-i18n --hostonly-mode strict --hostonly-nics --aggressive-strip --omit "rdma plymouth resume ifcfg earlykdump" --mount "/dev/disk/by-uuid/5421f911-fafd-4f0d-bf2e-2916252992eb /sysroot xfs rw,relatime,seclabel,attr2,inode64,logbufs=8,logbsize=32k,noquota" --add squash-squashfs --squash-compressor zstd --no-hostonly-default-device -f /boot/initramfs-6.12.0-38.el10.x86_64kdump.img 6.12.0-38.el10.x86_64 Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'systemd-bsod' will not be installed, because command '/usr/lib/systemd/systemd-bsod' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'systemd-networkd' will not be installed, because command '/usr/lib/systemd/systemd-networkd' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'systemd-networkd' will not be installed, because command '/usr/lib/systemd/systemd-networkd-wait-online' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'systemd-pcrphase' will not be installed, because command '/usr/lib/systemd/systemd-pcrphase' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'systemd-portabled' will not be installed, because command 'portablectl' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'systemd-portabled' will not be installed, because command '/usr/lib/systemd/systemd-portabled' could not be found! 
Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'systemd-resolved' will not be installed, because command '/usr/lib/systemd/systemd-resolved' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-timesyncd' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-time-wait-sync' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'busybox' will not be installed, because command 'busybox' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'dbus-daemon' will not be installed, because command 'dbus-daemon' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'connman' will not be installed, because command 'connmand' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'connman' will not be installed, because command 'connmanctl' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'connman' will not be installed, because command 'connmand-wait-online' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'ifcfg' will not be installed, because it's in the list to be omitted! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'plymouth' will not be installed, because it's in the list to be omitted! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: 62bluetooth: Could not find any command of '/usr/lib/bluetooth/bluetoothd /usr/libexec/bluetooth/bluetoothd'! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'btrfs' will not be installed, because command 'btrfs' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'dmraid' will not be installed, because command 'dmraid' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'mdraid' will not be installed, because command 'mdadm' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'multipath' will not be installed, because command 'multipath' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'crypt-gpg' will not be installed, because command 'gpg' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'pcsc' will not be installed, because command 'pcscd' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'cifs' will not be installed, because command 'mount.cifs' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'iscsi' will not be installed, because command 'iscsi-iname' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'iscsi' will not be installed, because command 'iscsiadm' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'iscsi' will not be installed, because command 'iscsid' could not be found! 
Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'nvmf' will not be installed, because command 'nvme' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'resume' will not be installed, because it's in the list to be omitted! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'squash-erofs' will not be installed, because command 'mkfs.erofs' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'squash-erofs' will not be installed, because command 'fsck.erofs' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'biosdevname' will not be installed, because command 'biosdevname' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'earlykdump' will not be installed, because it's in the list to be omitted! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'systemd-bsod' will not be installed, because command '/usr/lib/systemd/systemd-bsod' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'systemd-pcrphase' will not be installed, because command '/usr/lib/systemd/systemd-pcrphase' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'systemd-portabled' will not be installed, because command 'portablectl' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'systemd-portabled' will not be installed, because command '/usr/lib/systemd/systemd-portabled' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'systemd-resolved' will not be installed, because command '/usr/lib/systemd/systemd-resolved' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-timesyncd' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-time-wait-sync' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'busybox' will not be installed, because command 'busybox' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'dbus-daemon' will not be installed, because command 'dbus-daemon' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'connman' will not be installed, because command 'connmand' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'connman' will not be installed, because command 'connmanctl' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'connman' will not be installed, because command 'connmand-wait-online' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: 62bluetooth: Could not find any command of '/usr/lib/bluetooth/bluetoothd /usr/libexec/bluetooth/bluetoothd'! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'btrfs' will not be installed, because command 'btrfs' could not be found! 
Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'dmraid' will not be installed, because command 'dmraid' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'mdraid' will not be installed, because command 'mdadm' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'multipath' will not be installed, because command 'multipath' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'crypt-gpg' will not be installed, because command 'gpg' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'pcsc' will not be installed, because command 'pcscd' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'cifs' will not be installed, because command 'mount.cifs' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'iscsi' will not be installed, because command 'iscsi-iname' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'iscsi' will not be installed, because command 'iscsiadm' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'iscsi' will not be installed, because command 'iscsid' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'nvmf' will not be installed, because command 'nvme' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'squash-erofs' will not be installed, because command 'mkfs.erofs' could not be found! Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Module 'squash-erofs' will not be installed, because command 'fsck.erofs' could not be found! 
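Note on the dracut messages above: they come from rebuilding the kdump initramfs, and a module is skipped either because its binaries are absent from the host or because the kdump dracut configuration explicitly omits it (ifcfg, plymouth, resume, earlykdump). A minimal sketch of how one could cross-check the available module set from a playbook follows; the task names and registered variable are illustrative, not part of this test run:

  - name: List the dracut modules available on the host  # hypothetical diagnostic task
    ansible.builtin.command:
      cmd: dracut --list-modules
    register: dracut_modules        # illustrative variable name
    changed_when: false             # read-only query, never reports a change

  - name: Show which modules dracut could include
    ansible.builtin.debug:
      var: dracut_modules.stdout_lines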
Jan 18 11:23:55 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Including module: systemd *** Jan 18 11:23:56 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Including module: fips *** Jan 18 11:23:56 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Including module: fips-crypto-policies *** Jan 18 11:23:56 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Including module: systemd-ask-password *** Jan 18 11:23:56 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Including module: systemd-initrd *** Jan 18 11:23:56 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Including module: systemd-journald *** Jan 18 11:23:57 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Including module: systemd-modules-load *** Jan 18 11:23:57 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Including module: systemd-sysctl *** Jan 18 11:23:57 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Including module: systemd-sysusers *** Jan 18 11:23:57 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Including module: systemd-tmpfiles *** Jan 18 11:23:57 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Including module: systemd-udevd *** Jan 18 11:23:57 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Including module: rngd *** Jan 18 11:23:57 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Including module: i18n *** Jan 18 11:23:57 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Including module: drm *** Jan 18 11:23:57 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Including module: prefixdevname *** Jan 18 11:23:57 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Including module: kernel-modules *** Jan 18 11:23:58 ip-10-31-15-213.us-east-1.aws.redhat.com systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. 
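The "*** Including module: ... ***" lines list the modules that survived the checks and are being copied into the image. If one needed to pin an extra module into future rebuilds, a dracut.conf.d drop-in is the usual mechanism; a hedged sketch, where the file name and module are examples only and not taken from this run:

  - name: Force an additional dracut module via a drop-in  # illustrative, not from this test
    ansible.builtin.copy:
      dest: /etc/dracut.conf.d/90-extra-modules.conf       # hypothetical file name
      content: 'add_dracutmodules+=" lvm "'                # example module; dracut.conf syntax
      mode: "0644"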
Jan 18 11:23:58 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Including module: kernel-modules-extra *** Jan 18 11:23:58 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: kernel-modules-extra: configuration source "/run/depmod.d" does not exist Jan 18 11:23:58 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: kernel-modules-extra: configuration source "/lib/depmod.d" does not exist Jan 18 11:23:58 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: kernel-modules-extra: parsing configuration file "/etc/depmod.d/dist.conf" Jan 18 11:23:58 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: kernel-modules-extra: /etc/depmod.d/dist.conf: added "updates extra built-in weak-updates" to the list of search directories Jan 18 11:23:58 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Including module: pcmcia *** Jan 18 11:23:58 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Skipping udev rule: 60-pcmcia.rules Jan 18 11:23:58 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Including module: fstab-sys *** Jan 18 11:23:58 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Including module: hwdb *** Jan 18 11:23:58 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Including module: rootfs-block *** Jan 18 11:23:58 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Including module: squash-squashfs *** Jan 18 11:23:58 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Including module: terminfo *** Jan 18 11:23:58 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Including module: udev-rules *** Jan 18 11:23:58 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Including module: dracut-systemd *** Jan 18 11:23:59 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Including module: usrmount *** Jan 18 11:23:59 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Including module: base *** Jan 18 11:23:59 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Including module: fs-lib *** Jan 18 11:23:59 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Including module: kdumpbase *** Jan 18 11:23:59 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Including module: memstrack *** Jan 18 11:23:59 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Including module: microcode_ctl-fw_dir_override *** Jan 18 11:23:59 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: microcode_ctl module: mangling fw_dir Jan 18 11:23:59 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: microcode_ctl: reset fw_dir to "/lib/firmware/updates /lib/firmware" Jan 18 11:23:59 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel"... Jan 18 11:23:59 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: microcode_ctl: intel: caveats check for kernel version "6.12.0-38.el10.x86_64" passed, adding "/usr/share/microcode_ctl/ucode_with_caveats/intel" to fw_dir variable Jan 18 11:23:59 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-4f-01"... 
Jan 18 11:23:59 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: microcode_ctl: configuration "intel-06-4f-01" is ignored Jan 18 11:23:59 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: microcode_ctl: final fw_dir: "/usr/share/microcode_ctl/ucode_with_caveats/intel /lib/firmware/updates /lib/firmware" Jan 18 11:23:59 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Including module: shutdown *** Jan 18 11:23:59 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Including module: squash-lib *** Jan 18 11:23:59 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Including modules done *** Jan 18 11:24:00 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Installing kernel module dependencies *** Jan 18 11:24:00 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Installing kernel module dependencies done *** Jan 18 11:24:00 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Resolving executable dependencies *** Jan 18 11:24:01 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Resolving executable dependencies done *** Jan 18 11:24:01 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Hardlinking files *** Jan 18 11:24:01 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Mode: real Jan 18 11:24:01 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Method: sha256 Jan 18 11:24:01 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Files: 548 Jan 18 11:24:01 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Linked: 23 files Jan 18 11:24:01 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Compared: 0 xattrs Jan 18 11:24:01 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Compared: 51 files Jan 18 11:24:01 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Saved: 13.58 MiB Jan 18 11:24:01 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Duration: 0.206016 seconds Jan 18 11:24:01 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Hardlinking files done *** Jan 18 11:24:01 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Generating early-microcode cpio image *** Jan 18 11:24:01 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Constructing GenuineIntel.bin *** Jan 18 11:24:02 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Constructing GenuineIntel.bin *** Jan 18 11:24:02 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Store current command line parameters *** Jan 18 11:24:02 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: Stored kernel commandline: Jan 18 11:24:02 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: No dracut internal kernel commandline stored in the initramfs Jan 18 11:24:02 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Squashing the files inside the initramfs *** Jan 18 11:24:09 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Squashing the files inside the initramfs done *** Jan 18 11:24:09 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Creating image file '/boot/initramfs-6.12.0-38.el10.x86_64kdump.img' *** Jan 18 11:24:10 ip-10-31-15-213.us-east-1.aws.redhat.com dracut[1480]: *** Creating initramfs image file '/boot/initramfs-6.12.0-38.el10.x86_64kdump.img' done *** Jan 18 11:24:10 ip-10-31-15-213.us-east-1.aws.redhat.com kernel: PKCS7: Message signed outside of X.509 validity window Jan 18 11:24:10 ip-10-31-15-213.us-east-1.aws.redhat.com kdumpctl[894]: kdump: kexec: loaded kdump kernel Jan 18 11:24:10 ip-10-31-15-213.us-east-1.aws.redhat.com kdumpctl[894]: kdump: 
Starting kdump: [OK] Jan 18 11:24:10 ip-10-31-15-213.us-east-1.aws.redhat.com kdumpctl[894]: kdump: Notice: No vmcore creation test performed! Jan 18 11:24:10 ip-10-31-15-213.us-east-1.aws.redhat.com systemd[1]: Finished kdump.service - Crash recovery kernel arming. ░░ Subject: A start job for unit kdump.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit kdump.service has finished successfully. ░░ ░░ The job identifier is 254. Jan 18 11:24:10 ip-10-31-15-213.us-east-1.aws.redhat.com systemd[1]: Startup finished in 999ms (kernel) + 5.205s (initrd) + 37.890s (userspace) = 44.095s. ░░ Subject: System start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ All system services necessary queued for starting at boot have been ░░ started. Note that this does not mean that the machine is now idle as services ░░ might still be busy with completing start-up. ░░ ░░ Kernel start-up required 999406 microseconds. ░░ ░░ Initrd start-up required 5205777 microseconds. ░░ ░░ Userspace start-up required 37890624 microseconds. Jan 18 11:24:17 ip-10-31-15-213.us-east-1.aws.redhat.com systemd[1]: systemd-hostnamed.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-hostnamed.service has successfully entered the 'dead' state. Jan 18 11:26:36 ip-10-31-15-213.us-east-1.aws.redhat.com sshd-session[4342]: Accepted publickey for root from 10.30.32.164 port 39722 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU Jan 18 11:26:36 ip-10-31-15-213.us-east-1.aws.redhat.com systemd-logind[658]: New session 1 of user root. ░░ Subject: A new session 1 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 1 has been created for the user root. ░░ ░░ The leading process of the session is 4342. Jan 18 11:26:36 ip-10-31-15-213.us-east-1.aws.redhat.com systemd[1]: Created slice user-0.slice - User Slice of UID 0. ░░ Subject: A start job for unit user-0.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-0.slice has finished successfully. ░░ ░░ The job identifier is 663. Jan 18 11:26:36 ip-10-31-15-213.us-east-1.aws.redhat.com systemd[1]: Starting user-runtime-dir@0.service - User Runtime Directory /run/user/0... ░░ Subject: A start job for unit user-runtime-dir@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@0.service has begun execution. ░░ ░░ The job identifier is 662. Jan 18 11:26:36 ip-10-31-15-213.us-east-1.aws.redhat.com systemd[1]: Finished user-runtime-dir@0.service - User Runtime Directory /run/user/0. ░░ Subject: A start job for unit user-runtime-dir@0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@0.service has finished successfully. ░░ ░░ The job identifier is 662. Jan 18 11:26:36 ip-10-31-15-213.us-east-1.aws.redhat.com systemd[1]: Starting user@0.service - User Manager for UID 0... ░░ Subject: A start job for unit user@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@0.service has begun execution. ░░ ░░ The job identifier is 742. 
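At this point kdumpctl has built and kexec-loaded the crash kernel from the freshly created initramfs-6.12.0-38.el10.x86_64kdump.img, and kdump.service is armed; the notice about a skipped vmcore creation test is informational. If a playbook needed to guarantee this state, a minimal sketch (assuming the standard kdump package and unit name) would be:

  - name: Ensure the crash-recovery kernel is armed at boot  # sketch, not part of this test
    ansible.builtin.systemd:
      name: kdump
      enabled: true
      state: started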
Jan 18 11:26:36 ip-10-31-15-213.us-east-1.aws.redhat.com systemd-logind[658]: New session 2 of user root. ░░ Subject: A new session 2 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 2 has been created for the user root. ░░ ░░ The leading process of the session is 4347. Jan 18 11:26:36 ip-10-31-15-213.us-east-1.aws.redhat.com (systemd)[4347]: pam_unix(systemd-user:session): session opened for user root(uid=0) by root(uid=0) Jan 18 11:26:36 ip-10-31-15-213.us-east-1.aws.redhat.com systemd[4347]: Queued start job for default target default.target. Jan 18 11:26:36 ip-10-31-15-213.us-east-1.aws.redhat.com systemd[4347]: Created slice app.slice - User Application Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 8. Jan 18 11:26:36 ip-10-31-15-213.us-east-1.aws.redhat.com systemd[4347]: grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes was skipped because of an unmet condition check (ConditionUser=!@system). ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 6. Jan 18 11:26:36 ip-10-31-15-213.us-east-1.aws.redhat.com systemd[4347]: Started systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 4. Jan 18 11:26:36 ip-10-31-15-213.us-east-1.aws.redhat.com systemd[4347]: Reached target paths.target - Paths. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 10. Jan 18 11:26:36 ip-10-31-15-213.us-east-1.aws.redhat.com systemd[4347]: Reached target timers.target - Timers. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 3. Jan 18 11:26:36 ip-10-31-15-213.us-east-1.aws.redhat.com systemd[4347]: Starting dbus.socket - D-Bus User Message Bus Socket... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 12. Jan 18 11:26:36 ip-10-31-15-213.us-east-1.aws.redhat.com systemd[4347]: Starting systemd-tmpfiles-setup.service - Create User Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 7. Jan 18 11:26:36 ip-10-31-15-213.us-east-1.aws.redhat.com systemd[4347]: Finished systemd-tmpfiles-setup.service - Create User Files and Directories. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 7. Jan 18 11:26:36 ip-10-31-15-213.us-east-1.aws.redhat.com systemd[4347]: Listening on dbus.socket - D-Bus User Message Bus Socket. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 12. Jan 18 11:26:36 ip-10-31-15-213.us-east-1.aws.redhat.com systemd[4347]: Reached target sockets.target - Sockets. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 11. Jan 18 11:26:36 ip-10-31-15-213.us-east-1.aws.redhat.com systemd[4347]: Reached target basic.target - Basic System. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 2. Jan 18 11:26:36 ip-10-31-15-213.us-east-1.aws.redhat.com systemd[4347]: Reached target default.target - Main User Target. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 1. Jan 18 11:26:36 ip-10-31-15-213.us-east-1.aws.redhat.com systemd[4347]: Startup finished in 227ms. ░░ Subject: User manager start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The user manager instance for user 0 has been started. All services queued ░░ for starting have been started. Note that other services might still be starting ░░ up or be started at any later time. ░░ ░░ Startup of the manager took 227065 microseconds. Jan 18 11:26:36 ip-10-31-15-213.us-east-1.aws.redhat.com systemd[1]: Started user@0.service - User Manager for UID 0. ░░ Subject: A start job for unit user@0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@0.service has finished successfully. ░░ ░░ The job identifier is 742. Jan 18 11:26:37 ip-10-31-15-213.us-east-1.aws.redhat.com systemd[1]: Started session-1.scope - Session 1 of User root. ░░ Subject: A start job for unit session-1.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-1.scope has finished successfully. ░░ ░░ The job identifier is 823. Jan 18 11:26:37 ip-10-31-15-213.us-east-1.aws.redhat.com sshd-session[4342]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Jan 18 11:26:37 ip-10-31-15-213.us-east-1.aws.redhat.com sshd-session[4358]: Received disconnect from 10.30.32.164 port 39722:11: disconnected by user Jan 18 11:26:37 ip-10-31-15-213.us-east-1.aws.redhat.com sshd-session[4358]: Disconnected from user root 10.30.32.164 port 39722 Jan 18 11:26:37 ip-10-31-15-213.us-east-1.aws.redhat.com sshd-session[4342]: pam_unix(sshd:session): session closed for user root Jan 18 11:26:37 ip-10-31-15-213.us-east-1.aws.redhat.com systemd[1]: session-1.scope: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-1.scope has successfully entered the 'dead' state. Jan 18 11:26:37 ip-10-31-15-213.us-east-1.aws.redhat.com systemd-logind[658]: Session 1 logged out. Waiting for processes to exit. Jan 18 11:26:37 ip-10-31-15-213.us-east-1.aws.redhat.com systemd-logind[658]: Removed session 1. ░░ Subject: Session 1 has been terminated ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A session with the ID 1 has been terminated. Jan 18 11:26:41 ip-10-31-15-213.us-east-1.aws.redhat.com sshd-session[4394]: Accepted publickey for root from 10.31.11.42 port 60948 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU Jan 18 11:26:41 ip-10-31-15-213.us-east-1.aws.redhat.com sshd-session[4395]: Accepted publickey for root from 10.31.11.42 port 60956 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU Jan 18 11:26:41 ip-10-31-15-213.us-east-1.aws.redhat.com systemd-logind[658]: New session 3 of user root. ░░ Subject: A new session 3 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 3 has been created for the user root. ░░ ░░ The leading process of the session is 4394. Jan 18 11:26:41 ip-10-31-15-213.us-east-1.aws.redhat.com systemd[1]: Started session-3.scope - Session 3 of User root. ░░ Subject: A start job for unit session-3.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-3.scope has finished successfully. ░░ ░░ The job identifier is 905. Jan 18 11:26:41 ip-10-31-15-213.us-east-1.aws.redhat.com systemd-logind[658]: New session 4 of user root. ░░ Subject: A new session 4 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 4 has been created for the user root. ░░ ░░ The leading process of the session is 4395. Jan 18 11:26:41 ip-10-31-15-213.us-east-1.aws.redhat.com sshd-session[4394]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Jan 18 11:26:41 ip-10-31-15-213.us-east-1.aws.redhat.com systemd[1]: Started session-4.scope - Session 4 of User root. ░░ Subject: A start job for unit session-4.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-4.scope has finished successfully. ░░ ░░ The job identifier is 987. Jan 18 11:26:41 ip-10-31-15-213.us-east-1.aws.redhat.com sshd-session[4395]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Jan 18 11:26:41 ip-10-31-15-213.us-east-1.aws.redhat.com sshd-session[4401]: Received disconnect from 10.31.11.42 port 60956:11: disconnected by user Jan 18 11:26:41 ip-10-31-15-213.us-east-1.aws.redhat.com sshd-session[4401]: Disconnected from user root 10.31.11.42 port 60956 Jan 18 11:26:41 ip-10-31-15-213.us-east-1.aws.redhat.com sshd-session[4395]: pam_unix(sshd:session): session closed for user root Jan 18 11:26:41 ip-10-31-15-213.us-east-1.aws.redhat.com systemd[1]: session-4.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-4.scope has successfully entered the 'dead' state. 
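The rapid open/close of sessions 1 through 4 above is the Ansible controller probing the host: each connection spawns a short-lived sshd session. Connection multiplexing keeps this churn down; as a sketch under the assumption that the inventory uses standard OpenSSH options, the reuse settings can be supplied per group (the group_vars/all.yml placement is just one option):

  # group_vars/all.yml (illustrative placement, not from this run)
  ansible_ssh_common_args: "-o ControlMaster=auto -o ControlPersist=60s"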
Jan 18 11:26:41 ip-10-31-15-213.us-east-1.aws.redhat.com systemd-logind[658]: Session 4 logged out. Waiting for processes to exit. Jan 18 11:26:41 ip-10-31-15-213.us-east-1.aws.redhat.com systemd-logind[658]: Removed session 4. ░░ Subject: Session 4 has been terminated ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A session with the ID 4 has been terminated. Jan 18 11:27:05 ip-10-31-15-213.us-east-1.aws.redhat.com systemd[1]: Starting systemd-hostnamed.service - Hostname Service... ░░ Subject: A start job for unit systemd-hostnamed.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hostnamed.service has begun execution. ░░ ░░ The job identifier is 1069. Jan 18 11:27:05 ip-10-31-15-213.us-east-1.aws.redhat.com systemd[1]: Started systemd-hostnamed.service - Hostname Service. ░░ Subject: A start job for unit systemd-hostnamed.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hostnamed.service has finished successfully. ░░ ░░ The job identifier is 1069. Jan 18 11:27:05 managed-node2 systemd-hostnamed[5849]: Hostname set to <managed-node2> (static) Jan 18 11:27:05 managed-node2 NetworkManager[709]: <info>  [1737217625.3526] hostname: static hostname changed from "ip-10-31-15-213.us-east-1.aws.redhat.com" to "managed-node2" Jan 18 11:27:05 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 1147. Jan 18 11:27:05 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 1147. Jan 18 11:27:15 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Jan 18 11:27:35 managed-node2 systemd[1]: systemd-hostnamed.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-hostnamed.service has successfully entered the 'dead' state. Jan 18 11:27:35 managed-node2 sshd-session[6522]: Accepted publickey for root from 10.31.43.51 port 50448 ssh2: RSA SHA256:9j1blwt3wcrRiGYZQ7ZGu9axm3cDklH6/z4c+Ee8CzE Jan 18 11:27:35 managed-node2 systemd-logind[658]: New session 5 of user root. ░░ Subject: A new session 5 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 5 has been created for the user root. ░░ ░░ The leading process of the session is 6522. Jan 18 11:27:35 managed-node2 systemd[1]: Started session-5.scope - Session 5 of User root.
░░ Subject: A start job for unit session-5.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-5.scope has finished successfully. ░░ ░░ The job identifier is 1226. Jan 18 11:27:35 managed-node2 sshd-session[6522]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Jan 18 11:27:37 managed-node2 python3.12[6671]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Jan 18 11:27:38 managed-node2 python3.12[6825]: ansible-tempfile Invoked with state=directory prefix=lsr_ suffix=_podman path=None Jan 18 11:27:39 managed-node2 python3.12[6950]: ansible-file Invoked with path=/tmp/lsr_8do_wnm9_podman mode=0777 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False state=None _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:27:40 managed-node2 python3.12[7075]: ansible-user Invoked with name=podman_basic_user uid=3001 state=present non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node2 update_password=always group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None Jan 18 11:27:40 managed-node2 useradd[7077]: new group: name=podman_basic_user, GID=3001 Jan 18 11:27:40 managed-node2 useradd[7077]: new user: name=podman_basic_user, UID=3001, GID=3001, home=/home/podman_basic_user, shell=/bin/bash, from=/dev/pts/0 Jan 18 11:27:41 managed-node2 python3.12[7202]: ansible-file Invoked with path=/tmp/lsr_8do_wnm9_podman/httpd1 state=directory mode=0755 owner=podman_basic_user recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:27:41 managed-node2 python3.12[7327]: ansible-file Invoked with path=/tmp/lsr_8do_wnm9_podman/httpd2 state=directory mode=0755 owner=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:27:42 managed-node2 python3.12[7452]: ansible-file Invoked with path=/tmp/lsr_8do_wnm9_podman/httpd3 state=directory mode=0755 owner=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:27:42 managed-node2 python3.12[7577]: ansible-ansible.legacy.stat Invoked with 
path=/tmp/lsr_8do_wnm9_podman/httpd1/index.txt follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 18 11:27:43 managed-node2 python3.12[7677]: ansible-ansible.legacy.copy Invoked with dest=/tmp/lsr_8do_wnm9_podman/httpd1/index.txt mode=0644 owner=podman_basic_user src=/root/.ansible/tmp/ansible-tmp-1737217662.4112873-7120-117226880983605/.source.txt _original_basename=.nthgqsa8 follow=False checksum=40bd001563085fc35165329ea1ff5c5ecbdbbeef backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:27:43 managed-node2 python3.12[7802]: ansible-ansible.legacy.stat Invoked with path=/tmp/lsr_8do_wnm9_podman/httpd2/index.txt follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 18 11:27:43 managed-node2 python3.12[7902]: ansible-ansible.legacy.copy Invoked with dest=/tmp/lsr_8do_wnm9_podman/httpd2/index.txt mode=0644 owner=root src=/root/.ansible/tmp/ansible-tmp-1737217663.0892506-7120-154356650408310/.source.txt _original_basename=._uamuprj follow=False checksum=40bd001563085fc35165329ea1ff5c5ecbdbbeef backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:27:44 managed-node2 python3.12[8027]: ansible-ansible.legacy.stat Invoked with path=/tmp/lsr_8do_wnm9_podman/httpd3/index.txt follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 18 11:27:44 managed-node2 python3.12[8127]: ansible-ansible.legacy.copy Invoked with dest=/tmp/lsr_8do_wnm9_podman/httpd3/index.txt mode=0644 owner=root src=/root/.ansible/tmp/ansible-tmp-1737217663.7594595-7120-50422747832983/.source.txt _original_basename=.p9l_7l3o follow=False checksum=40bd001563085fc35165329ea1ff5c5ecbdbbeef backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:27:44 managed-node2 python3.12[8252]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:27:45 managed-node2 python3.12[8377]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:27:46 managed-node2 sudo[8627]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ivhxbbvaolvqfoqbmeodacnpiaiyljgs ; /usr/bin/python3.12 /root/.ansible/tmp/ansible-tmp-1737217666.44123-7231-197571347797449/AnsiballZ_dnf.py' Jan 18 11:27:46 managed-node2 sudo[8627]: pam_unix(sudo:session): session opened for user root(uid=0) by root(uid=0) Jan 18 11:27:47 managed-node2 python3.12[8630]: ansible-ansible.legacy.dnf Invoked with name=['iptables-nft', 'podman', 'shadow-utils-subid'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False 
update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 18 11:28:01 managed-node2 kernel: SELinux: Converting 388 SID table entries... Jan 18 11:28:01 managed-node2 kernel: SELinux: policy capability network_peer_controls=1 Jan 18 11:28:01 managed-node2 kernel: SELinux: policy capability open_perms=1 Jan 18 11:28:01 managed-node2 kernel: SELinux: policy capability extended_socket_class=1 Jan 18 11:28:01 managed-node2 kernel: SELinux: policy capability always_check_network=0 Jan 18 11:28:01 managed-node2 kernel: SELinux: policy capability cgroup_seclabel=1 Jan 18 11:28:01 managed-node2 kernel: SELinux: policy capability nnp_nosuid_transition=1 Jan 18 11:28:01 managed-node2 kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Jan 18 11:28:01 managed-node2 kernel: SELinux: policy capability ioctl_skip_cloexec=0 Jan 18 11:28:01 managed-node2 kernel: SELinux: policy capability userspace_initial_context=0 Jan 18 11:28:03 managed-node2 kernel: SELinux: Converting 389 SID table entries... Jan 18 11:28:03 managed-node2 kernel: SELinux: policy capability network_peer_controls=1 Jan 18 11:28:03 managed-node2 kernel: SELinux: policy capability open_perms=1 Jan 18 11:28:03 managed-node2 kernel: SELinux: policy capability extended_socket_class=1 Jan 18 11:28:03 managed-node2 kernel: SELinux: policy capability always_check_network=0 Jan 18 11:28:03 managed-node2 kernel: SELinux: policy capability cgroup_seclabel=1 Jan 18 11:28:03 managed-node2 kernel: SELinux: policy capability nnp_nosuid_transition=1 Jan 18 11:28:03 managed-node2 kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Jan 18 11:28:03 managed-node2 kernel: SELinux: policy capability ioctl_skip_cloexec=0 Jan 18 11:28:03 managed-node2 kernel: SELinux: policy capability userspace_initial_context=0 Jan 18 11:28:05 managed-node2 setsebool[8715]: The virt_use_nfs policy boolean was changed to 1 by root Jan 18 11:28:05 managed-node2 setsebool[8715]: The virt_sandbox_use_all_caps policy boolean was changed to 1 by root Jan 18 11:28:08 managed-node2 kernel: SELinux: Converting 396 SID table entries... Jan 18 11:28:08 managed-node2 kernel: SELinux: policy capability network_peer_controls=1 Jan 18 11:28:08 managed-node2 kernel: SELinux: policy capability open_perms=1 Jan 18 11:28:08 managed-node2 kernel: SELinux: policy capability extended_socket_class=1 Jan 18 11:28:08 managed-node2 kernel: SELinux: policy capability always_check_network=0 Jan 18 11:28:08 managed-node2 kernel: SELinux: policy capability cgroup_seclabel=1 Jan 18 11:28:08 managed-node2 kernel: SELinux: policy capability nnp_nosuid_transition=1 Jan 18 11:28:08 managed-node2 kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Jan 18 11:28:08 managed-node2 kernel: SELinux: policy capability ioctl_skip_cloexec=0 Jan 18 11:28:08 managed-node2 kernel: SELinux: policy capability userspace_initial_context=0 Jan 18 11:28:09 managed-node2 groupadd[8735]: group added to /etc/group: name=polkitd, GID=114 Jan 18 11:28:09 managed-node2 groupadd[8735]: group added to /etc/gshadow: name=polkitd Jan 18 11:28:09 managed-node2 groupadd[8735]: new group: name=polkitd, GID=114 Jan 18 11:28:09 managed-node2 useradd[8738]: new user: name=polkitd, UID=114, GID=114, home=/, shell=/sbin/nologin, from=none Jan 18 11:28:09 managed-node2 dbus-broker-launch[632]: Noticed file-system modification, trigger reload. 
░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reload request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reloaded again. Jan 18 11:28:09 managed-node2 dbus-broker-launch[632]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reload request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reloaded again. Jan 18 11:28:10 managed-node2 systemd[1]: Listening on pcscd.socket - PC/SC Smart Card Daemon Activation Socket. ░░ Subject: A start job for unit pcscd.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pcscd.socket has finished successfully. ░░ ░░ The job identifier is 1310. Jan 18 11:28:13 managed-node2 systemd[1]: Started run-p9137-i9437.service - [systemd-run] /usr/bin/systemctl start man-db-cache-update. ░░ Subject: A start job for unit run-p9137-i9437.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-p9137-i9437.service has finished successfully. ░░ ░░ The job identifier is 1388. Jan 18 11:28:13 managed-node2 systemd[1]: Reload requested from client PID 9141 ('systemctl') (unit session-5.scope)... Jan 18 11:28:13 managed-node2 systemd[1]: Reloading... Jan 18 11:28:13 managed-node2 systemd-rc-local-generator[9184]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 18 11:28:13 managed-node2 systemd-ssh-generator[9187]: Failed to query local AF_VSOCK CID: Permission denied Jan 18 11:28:13 managed-node2 (sd-exec-[9160]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 18 11:28:13 managed-node2 systemd[1]: Reloading finished in 202 ms. Jan 18 11:28:13 managed-node2 systemd[1]: Starting man-db-cache-update.service... ░░ Subject: A start job for unit man-db-cache-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has begun execution. ░░ ░░ The job identifier is 1466.
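The ansible.legacy.dnf call logged above (which triggered the man-db cache refresh and D-Bus configuration reloads) is the role installing podman's package set. Reduced to the parameters that matter, the task looks roughly like this; the role itself passes many more defaulted options, as the parameter dump in the log shows:

  - name: Ensure container tooling is installed   # minimal sketch of the logged dnf call
    ansible.builtin.dnf:
      name:
        - iptables-nft
        - podman
        - shadow-utils-subid
      state: present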
Jan 18 11:28:13 managed-node2 systemd[1]: Queuing reload/restart jobs for marked units… Jan 18 11:28:13 managed-node2 systemd[1]: Reloading user@0.service - User Manager for UID 0... ░░ Subject: A reload job for unit user@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A reload job for unit user@0.service has begun execution. ░░ ░░ The job identifier is 1544. Jan 18 11:28:13 managed-node2 systemd[4347]: Received SIGRTMIN+25 from PID 1 (systemd). Jan 18 11:28:13 managed-node2 systemd[4347]: Reexecuting. Jan 18 11:28:13 managed-node2 systemd[1]: Reloaded user@0.service - User Manager for UID 0. ░░ Subject: A reload job for unit user@0.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A reload job for unit user@0.service has finished. ░░ ░░ The job identifier is 1544 and the job result is done. Jan 18 11:28:15 managed-node2 sudo[8627]: pam_unix(sudo:session): session closed for user root Jan 18 11:28:15 managed-node2 python3.12[9789]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:28:16 managed-node2 python3.12[9926]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jan 18 11:28:17 managed-node2 python3.12[10058]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:28:18 managed-node2 systemd[1]: man-db-cache-update.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service has successfully entered the 'dead' state. Jan 18 11:28:18 managed-node2 systemd[1]: Finished man-db-cache-update.service. ░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 1466. Jan 18 11:28:18 managed-node2 systemd[1]: run-p9137-i9437.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-p9137-i9437.service has successfully entered the 'dead' state. 
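The stat of /usr/bin/getsubids above is the podman role probing whether subordinate ID lookups are available before it configures rootless users. A minimal reproduction of that probe follows; the fail task and the registered variable name are illustrative additions, not necessarily what the role does:

  - name: Check for getsubids
    ansible.builtin.stat:
      path: /usr/bin/getsubids
    register: __getsubids_stat   # illustrative name

  - name: Report hosts without subid support     # hypothetical follow-up, not from this run
    ansible.builtin.fail:
      msg: getsubids is missing; rootless podman needs shadow-utils-subid
    when: not __getsubids_stat.stat.exists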
Jan 18 11:28:18 managed-node2 python3.12[10195]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:28:19 managed-node2 python3.12[10326]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:28:19 managed-node2 python3.12[10457]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 18 11:28:21 managed-node2 python3.12[10589]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Jan 18 11:28:22 managed-node2 python3.12[10722]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 18 11:28:22 managed-node2 systemd[1]: Reload requested from client PID 10725 ('systemctl') (unit session-5.scope)... Jan 18 11:28:22 managed-node2 systemd[1]: Reloading... Jan 18 11:28:22 managed-node2 systemd-rc-local-generator[10768]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 18 11:28:22 managed-node2 systemd-ssh-generator[10771]: Failed to query local AF_VSOCK CID: Permission denied Jan 18 11:28:22 managed-node2 (sd-exec-[10743]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 18 11:28:22 managed-node2 systemd[1]: Reloading finished in 190 ms. Jan 18 11:28:22 managed-node2 systemd[1]: Starting firewalld.service - firewalld - dynamic firewall daemon... ░░ Subject: A start job for unit firewalld.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit firewalld.service has begun execution. ░░ ░░ The job identifier is 1545. Jan 18 11:28:23 managed-node2 systemd[1]: Started firewalld.service - firewalld - dynamic firewall daemon. ░░ Subject: A start job for unit firewalld.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit firewalld.service has finished successfully. ░░ ░░ The job identifier is 1545. Jan 18 11:28:23 managed-node2 kernel: Warning: Unmaintained driver is detected: ip_set Jan 18 11:28:24 managed-node2 python3.12[10938]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 18 11:28:24 managed-node2 systemd[1]: Starting polkit.service - Authorization Manager... 
░░ Subject: A start job for unit polkit.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit polkit.service has begun execution. ░░ ░░ The job identifier is 1628. Jan 18 11:28:24 managed-node2 polkitd[10955]: Started polkitd version 125 Jan 18 11:28:24 managed-node2 systemd[1]: Started polkit.service - Authorization Manager. ░░ Subject: A start job for unit polkit.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit polkit.service has finished successfully. ░░ ░░ The job identifier is 1628. Jan 18 11:28:25 managed-node2 python3.12[11095]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:28:25 managed-node2 python3.12[11226]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:28:26 managed-node2 python3.12[11357]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 18 11:28:27 managed-node2 python3.12[11489]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 18 11:28:28 managed-node2 dbus-broker-launch[632]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reload request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reloaded again. Jan 18 11:28:28 managed-node2 dbus-broker-launch[632]: Noticed file-system modification, trigger reload.
░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reload request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reloaded again. Jan 18 11:28:29 managed-node2 systemd[1]: Started run-p11505-i11805.service - [systemd-run] /usr/bin/systemctl start man-db-cache-update. ░░ Subject: A start job for unit run-p11505-i11805.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-p11505-i11805.service has finished successfully. ░░ ░░ The job identifier is 1709. Jan 18 11:28:29 managed-node2 systemd[1]: Starting man-db-cache-update.service... ░░ Subject: A start job for unit man-db-cache-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has begun execution. ░░ ░░ The job identifier is 1787. Jan 18 11:28:29 managed-node2 systemd[1]: Reload requested from client PID 11509 ('systemctl') (unit session-5.scope)... Jan 18 11:28:29 managed-node2 systemd[1]: Reloading... Jan 18 11:28:29 managed-node2 systemd-rc-local-generator[11548]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 18 11:28:29 managed-node2 systemd-ssh-generator[11557]: Failed to query local AF_VSOCK CID: Permission denied Jan 18 11:28:29 managed-node2 (sd-exec-[11530]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 18 11:28:29 managed-node2 systemd[1]: Reloading finished in 331 ms. Jan 18 11:28:29 managed-node2 systemd[1]: Queuing reload/restart jobs for marked units… Jan 18 11:28:29 managed-node2 systemd[1]: Started run-p11568-i11868.service - [systemd-run] /usr/bin/systemctl start man-db-cache-update. ░░ Subject: A start job for unit run-p11568-i11868.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-p11568-i11868.service has finished successfully. ░░ ░░ The job identifier is 1865. Jan 18 11:28:29 managed-node2 systemd[1]: man-db-cache-update.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service has successfully entered the 'dead' state. Jan 18 11:28:29 managed-node2 systemd[1]: Finished man-db-cache-update.service. ░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 1787. Jan 18 11:28:30 managed-node2 systemd[1]: run-p11505-i11805.service: Deactivated successfully.
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-p11505-i11805.service has successfully entered the 'dead' state. Jan 18 11:28:30 managed-node2 systemd[1]: run-p11568-i11868.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-p11568-i11868.service has successfully entered the 'dead' state. Jan 18 11:28:30 managed-node2 python3.12[11705]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Jan 18 11:28:32 managed-node2 python3.12[11865]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jan 18 11:28:33 managed-node2 kernel: SELinux: Converting 426 SID table entries... Jan 18 11:28:33 managed-node2 kernel: SELinux: policy capability network_peer_controls=1 Jan 18 11:28:33 managed-node2 kernel: SELinux: policy capability open_perms=1 Jan 18 11:28:33 managed-node2 kernel: SELinux: policy capability extended_socket_class=1 Jan 18 11:28:33 managed-node2 kernel: SELinux: policy capability always_check_network=0 Jan 18 11:28:33 managed-node2 kernel: SELinux: policy capability cgroup_seclabel=1 Jan 18 11:28:33 managed-node2 kernel: SELinux: policy capability nnp_nosuid_transition=1 Jan 18 11:28:33 managed-node2 kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Jan 18 11:28:33 managed-node2 kernel: SELinux: policy capability ioctl_skip_cloexec=0 Jan 18 11:28:33 managed-node2 kernel: SELinux: policy capability userspace_initial_context=0 Jan 18 11:28:34 managed-node2 python3.12[12000]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jan 18 11:28:38 managed-node2 python3.12[12131]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:28:40 managed-node2 python3.12[12264]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:28:40 managed-node2 python3.12[12395]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:28:41 managed-node2 python3.12[12526]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 18 11:28:41 managed-node2 python3.12[12631]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/nopull.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1737217720.7656662-8928-264050332943533/.source.yml _original_basename=._etjxfpn follow=False checksum=d5dc917e3cae36de03aa971a17ac473f86fdf934 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:28:42 managed-node2 python3.12[12762]: 
ansible-containers.podman.podman_play Invoked with state=created kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jan 18 11:28:42 managed-node2 systemd[1]: var-lib-containers-storage-overlay-compat3670888534-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-compat3670888534-merged.mount has successfully entered the 'dead' state. Jan 18 11:28:42 managed-node2 rsyslogd[891]: imjournal: journal files changed, reloading... [v8.2412.0-1.el10 try https://www.rsyslog.com/e/0 ] Jan 18 11:28:42 managed-node2 kernel: evm: overlay not supported Jan 18 11:28:42 managed-node2 systemd[1]: var-lib-containers-storage-overlay-metacopy\x2dcheck2987161487-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-metacopy\x2dcheck2987161487-merged.mount has successfully entered the 'dead' state. Jan 18 11:28:42 managed-node2 podman[12769]: 2025-01-18 11:28:42.136419903 -0500 EST m=+0.075181485 system refresh Jan 18 11:28:42 managed-node2 podman[12769]: 2025-01-18 11:28:42.447917868 -0500 EST m=+0.386679583 image build 00e7ed281fae0a457d309c3c4887646b98c1226f1ff60de69ef58bf1e343c789 Jan 18 11:28:42 managed-node2 systemd[1]: Created slice machine.slice - Slice /machine. ░░ Subject: A start job for unit machine.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine.slice has finished successfully. ░░ ░░ The job identifier is 2022. Jan 18 11:28:42 managed-node2 systemd[1]: Created slice machine-libpod_pod_1605f25a3e2717cc73705c948b959f044565c241ccf1c583fa5d9fa481c5bc93.slice - cgroup machine-libpod_pod_1605f25a3e2717cc73705c948b959f044565c241ccf1c583fa5d9fa481c5bc93.slice. ░░ Subject: A start job for unit machine-libpod_pod_1605f25a3e2717cc73705c948b959f044565c241ccf1c583fa5d9fa481c5bc93.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_1605f25a3e2717cc73705c948b959f044565c241ccf1c583fa5d9fa481c5bc93.slice has finished successfully. ░░ ░░ The job identifier is 2021. 
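[Note] The podman_play task above (state=created) drives podman's kube support; the pause-image build, pod create, and container create events that follow are what that step produces. A minimal command-line sketch of the same operation, assuming state=created corresponds to creating the pod without starting it:

    # Create (but do not start) the pod/containers described in the kube YAML
    podman kube play --start=false /etc/containers/ansible-kubernetes.d/nopull.yml

    # Confirm the pod exists in the Created state
    podman pod ps --format '{{.Name}} {{.Status}}'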
Jan 18 11:28:42 managed-node2 podman[12769]: 2025-01-18 11:28:42.49381031 -0500 EST m=+0.432571901 container create 029090fa53303e97b2031648820e023bdfc8ca076759e8222ec1b22bd56f5c70 (image=localhost/podman-pause:5.3.1-1733097600, name=1605f25a3e27-infra, pod_id=1605f25a3e2717cc73705c948b959f044565c241ccf1c583fa5d9fa481c5bc93, io.buildah.version=1.38.0) Jan 18 11:28:42 managed-node2 podman[12769]: 2025-01-18 11:28:42.498742681 -0500 EST m=+0.437504350 pod create 1605f25a3e2717cc73705c948b959f044565c241ccf1c583fa5d9fa481c5bc93 (image=, name=nopull) Jan 18 11:28:43 managed-node2 podman[12769]: 2025-01-18 11:28:43.880806436 -0500 EST m=+1.819568103 container create 798be2d41252cc967126d0ad1739a311fe2c5ce622e7e7ef4422919ff8eb5293 (image=quay.io/libpod/testimage:20210610, name=nopull-nopull, pod_id=1605f25a3e2717cc73705c948b959f044565c241ccf1c583fa5d9fa481c5bc93, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, app=test, io.containers.autoupdate=registry) Jan 18 11:28:43 managed-node2 podman[12769]: 2025-01-18 11:28:43.860351116 -0500 EST m=+1.799113107 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jan 18 11:28:43 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:28:46 managed-node2 python3.12[13102]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:28:47 managed-node2 python3.12[13239]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:28:50 managed-node2 python3.12[13372]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 18 11:28:51 managed-node2 python3.12[13504]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Jan 18 11:28:51 managed-node2 python3.12[13637]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 18 11:28:52 managed-node2 python3.12[13770]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None 
ipset=None ipset_type=None description=None short=None Jan 18 11:28:54 managed-node2 python3.12[13901]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 18 11:28:55 managed-node2 python3.12[14033]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 18 11:28:56 managed-node2 python3.12[14165]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Jan 18 11:28:57 managed-node2 python3.12[14325]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jan 18 11:28:58 managed-node2 python3.12[14456]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jan 18 11:29:02 managed-node2 python3.12[14587]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:29:04 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:29:04 managed-node2 podman[14728]: 2025-01-18 11:29:04.938735437 -0500 EST m=+0.516042146 image pull-error quay.io/linux-system-roles/this_is_a_bogus_image:latest initializing source docker://quay.io/linux-system-roles/this_is_a_bogus_image:latest: reading manifest latest in quay.io/linux-system-roles/this_is_a_bogus_image: unauthorized: access to the requested resource is not authorized Jan 18 11:29:04 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
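[Note] Each pass of the role re-runs local_seport to keep TCP ports 15001-15003 labeled http_port_t. With policycoreutils-python-utils (installed by the dnf task above), the hand-run equivalent would be roughly:

    # Label the port range so SELinux lets a web-server domain bind to it
    semanage port -a -t http_port_t -p tcp 15001-15003

    # Use -m instead of -a if the range is already labeled
    semanage port -m -t http_port_t -p tcp 15001-15003

    # Verify the label took effect
    semanage port -l | grep -w http_port_t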
Jan 18 11:29:05 managed-node2 python3.12[14866]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:29:05 managed-node2 python3.12[14997]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:29:06 managed-node2 python3.12[15128]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 18 11:29:06 managed-node2 python3.12[15233]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/bogus.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1737217746.0075302-9944-244956550119175/.source.yml _original_basename=.5ma9xxzi follow=False checksum=f8266a972ed3be7e204d2a67883fe3a22b8dbf18 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:29:07 managed-node2 python3.12[15364]: ansible-containers.podman.podman_play Invoked with state=created kube_file=/etc/containers/ansible-kubernetes.d/bogus.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jan 18 11:29:07 managed-node2 systemd[1]: Created slice machine-libpod_pod_ba6bb4341304eea77e588db44a4971abda509361fcb33d0fa426535dff558713.slice - cgroup machine-libpod_pod_ba6bb4341304eea77e588db44a4971abda509361fcb33d0fa426535dff558713.slice. ░░ Subject: A start job for unit machine-libpod_pod_ba6bb4341304eea77e588db44a4971abda509361fcb33d0fa426535dff558713.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_ba6bb4341304eea77e588db44a4971abda509361fcb33d0fa426535dff558713.slice has finished successfully. ░░ ░░ The job identifier is 2027. 
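[Note] As with the nopull pod earlier, podman (running with the systemd cgroup manager) puts each kube pod in its own machine-libpod_pod_<id>.slice under machine.slice. To see that placement on the node, something like:

    # Walk the cgroup tree where libpod pod slices live
    systemd-cgls /machine.slice

    # Or query the pod slice directly, using the full unit name from the journal
    systemctl status 'machine-libpod_pod_ba6bb4341304eea77e588db44a4971abda509361fcb33d0fa426535dff558713.slice'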
Jan 18 11:29:07 managed-node2 podman[15371]: 2025-01-18 11:29:07.267388693 -0500 EST m=+0.120430397 container create f8370433c3a4f8ba8a947f6b1890275bb2962a962b65c68f5555ee94a4731dec (image=localhost/podman-pause:5.3.1-1733097600, name=ba6bb4341304-infra, pod_id=ba6bb4341304eea77e588db44a4971abda509361fcb33d0fa426535dff558713, io.buildah.version=1.38.0) Jan 18 11:29:07 managed-node2 podman[15371]: 2025-01-18 11:29:07.271907663 -0500 EST m=+0.124949338 pod create ba6bb4341304eea77e588db44a4971abda509361fcb33d0fa426535dff558713 (image=, name=bogus) Jan 18 11:29:07 managed-node2 podman[15371]: 2025-01-18 11:29:07.598058737 -0500 EST m=+0.451100473 image pull-error quay.io/linux-system-roles/this_is_a_bogus_image:latest initializing source docker://quay.io/linux-system-roles/this_is_a_bogus_image:latest: reading manifest latest in quay.io/linux-system-roles/this_is_a_bogus_image: unauthorized: access to the requested resource is not authorized Jan 18 11:29:07 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:29:10 managed-node2 python3.12[15640]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:29:11 managed-node2 python3.12[15777]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:29:13 managed-node2 python3.12[15910]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 18 11:29:15 managed-node2 python3.12[16042]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Jan 18 11:29:15 managed-node2 python3.12[16175]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 18 11:29:16 managed-node2 python3.12[16308]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 18 11:29:18 managed-node2 python3.12[16439]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False 
bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 18 11:29:19 managed-node2 python3.12[16571]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 18 11:29:20 managed-node2 python3.12[16703]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Jan 18 11:29:22 managed-node2 python3.12[16863]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jan 18 11:29:22 managed-node2 python3.12[16994]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jan 18 11:29:27 managed-node2 python3.12[17125]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:29:28 managed-node2 python3.12[17258]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/nopull.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:29:29 managed-node2 python3.12[17390]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-nopull.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None Jan 18 11:29:29 managed-node2 python3.12[17523]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:29:30 managed-node2 python3.12[17656]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jan 18 11:29:30 managed-node2 python3.12[17656]: ansible-containers.podman.podman_play version: 5.3.1, kube file /etc/containers/ansible-kubernetes.d/nopull.yml Jan 18 11:29:30 managed-node2 podman[17664]: 2025-01-18 11:29:30.96604145 -0500 EST m=+0.025928446 pod stop 
1605f25a3e2717cc73705c948b959f044565c241ccf1c583fa5d9fa481c5bc93 (image=, name=nopull) Jan 18 11:29:30 managed-node2 systemd[1]: Removed slice machine-libpod_pod_1605f25a3e2717cc73705c948b959f044565c241ccf1c583fa5d9fa481c5bc93.slice - cgroup machine-libpod_pod_1605f25a3e2717cc73705c948b959f044565c241ccf1c583fa5d9fa481c5bc93.slice. ░░ Subject: A stop job for unit machine-libpod_pod_1605f25a3e2717cc73705c948b959f044565c241ccf1c583fa5d9fa481c5bc93.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_1605f25a3e2717cc73705c948b959f044565c241ccf1c583fa5d9fa481c5bc93.slice has finished. ░░ ░░ The job identifier is 2033 and the job result is done. Jan 18 11:29:31 managed-node2 podman[17664]: 2025-01-18 11:29:31.00271487 -0500 EST m=+0.062601829 container remove 798be2d41252cc967126d0ad1739a311fe2c5ce622e7e7ef4422919ff8eb5293 (image=quay.io/libpod/testimage:20210610, name=nopull-nopull, pod_id=1605f25a3e2717cc73705c948b959f044565c241ccf1c583fa5d9fa481c5bc93, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jan 18 11:29:31 managed-node2 podman[17664]: 2025-01-18 11:29:31.024443838 -0500 EST m=+0.084330733 container remove 029090fa53303e97b2031648820e023bdfc8ca076759e8222ec1b22bd56f5c70 (image=localhost/podman-pause:5.3.1-1733097600, name=1605f25a3e27-infra, pod_id=1605f25a3e2717cc73705c948b959f044565c241ccf1c583fa5d9fa481c5bc93, io.buildah.version=1.38.0) Jan 18 11:29:31 managed-node2 podman[17664]: 2025-01-18 11:29:31.032434801 -0500 EST m=+0.092321692 pod remove 1605f25a3e2717cc73705c948b959f044565c241ccf1c583fa5d9fa481c5bc93 (image=, name=nopull) Jan 18 11:29:31 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:29:31 managed-node2 python3.12[17803]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:29:31 managed-node2 python3.12[17934]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:29:31 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
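[Note] The state=absent pass above stops the pod, removes its containers and infra container, and deletes the pod; the role then prunes dangling images. By hand that teardown is approximately:

    # Stop and remove everything the kube YAML created
    podman kube down /etc/containers/ansible-kubernetes.d/nopull.yml

    # Remove dangling images afterwards (the exact command logged above)
    podman image prune -f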
Jan 18 11:29:34 managed-node2 python3.12[18202]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:29:36 managed-node2 python3.12[18339]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:29:38 managed-node2 python3.12[18472]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 18 11:29:39 managed-node2 python3.12[18604]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Jan 18 11:29:40 managed-node2 python3.12[18737]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 18 11:29:41 managed-node2 python3.12[18870]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 18 11:29:43 managed-node2 python3.12[19001]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 18 11:29:44 managed-node2 python3.12[19133]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 18 11:29:45 managed-node2 python3.12[19265]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 
fact_path=/etc/ansible/facts.d Jan 18 11:29:46 managed-node2 python3.12[19425]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jan 18 11:29:47 managed-node2 python3.12[19556]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jan 18 11:29:52 managed-node2 python3.12[19687]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:29:53 managed-node2 python3.12[19820]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/bogus.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:29:54 managed-node2 python3.12[19952]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-bogus.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None Jan 18 11:29:54 managed-node2 python3.12[20085]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:29:55 managed-node2 python3.12[20218]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/bogus.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jan 18 11:29:55 managed-node2 python3.12[20218]: ansible-containers.podman.podman_play version: 5.3.1, kube file /etc/containers/ansible-kubernetes.d/bogus.yml Jan 18 11:29:55 managed-node2 podman[20225]: 2025-01-18 11:29:55.501778371 -0500 EST m=+0.028176873 pod stop ba6bb4341304eea77e588db44a4971abda509361fcb33d0fa426535dff558713 (image=, name=bogus) Jan 18 11:29:55 managed-node2 systemd[1]: Removed slice machine-libpod_pod_ba6bb4341304eea77e588db44a4971abda509361fcb33d0fa426535dff558713.slice - cgroup machine-libpod_pod_ba6bb4341304eea77e588db44a4971abda509361fcb33d0fa426535dff558713.slice. ░░ Subject: A stop job for unit machine-libpod_pod_ba6bb4341304eea77e588db44a4971abda509361fcb33d0fa426535dff558713.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_ba6bb4341304eea77e588db44a4971abda509361fcb33d0fa426535dff558713.slice has finished. ░░ ░░ The job identifier is 2035 and the job result is done. 
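[Note] The systemd-escape --template call above is how the role derives the podman-kube@ unit instance name from the kube file path; running it by hand shows the mapping, and the output matches the unit the following systemd task stops:

    $ systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/bogus.yml
    podman-kube@-etc-containers-ansible\x2dkubernetes.d-bogus.yml.service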
Jan 18 11:29:55 managed-node2 podman[20225]: 2025-01-18 11:29:55.534028376 -0500 EST m=+0.060426894 container remove f8370433c3a4f8ba8a947f6b1890275bb2962a962b65c68f5555ee94a4731dec (image=localhost/podman-pause:5.3.1-1733097600, name=ba6bb4341304-infra, pod_id=ba6bb4341304eea77e588db44a4971abda509361fcb33d0fa426535dff558713, io.buildah.version=1.38.0) Jan 18 11:29:55 managed-node2 podman[20225]: 2025-01-18 11:29:55.542061599 -0500 EST m=+0.068460080 pod remove ba6bb4341304eea77e588db44a4971abda509361fcb33d0fa426535dff558713 (image=, name=bogus) Jan 18 11:29:55 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:29:55 managed-node2 python3.12[20365]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:29:56 managed-node2 python3.12[20496]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:29:56 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
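[Note] The firewall_lib task that each pass re-runs (permanent=True, runtime=True) opens 15001-15003/tcp in both the runtime and permanent firewalld configuration. A rough firewall-cmd equivalent:

    # Open the range immediately (runtime configuration)
    firewall-cmd --add-port=15001-15003/tcp

    # Persist the change across firewalld reloads and reboots
    firewall-cmd --permanent --add-port=15001-15003/tcp

    # Check the result
    firewall-cmd --list-ports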
Jan 18 11:29:59 managed-node2 python3.12[20765]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:30:00 managed-node2 python3.12[20903]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:30:03 managed-node2 python3.12[21036]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 18 11:30:04 managed-node2 python3.12[21168]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Jan 18 11:30:05 managed-node2 python3.12[21301]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 18 11:30:05 managed-node2 python3.12[21434]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 18 11:30:07 managed-node2 python3.12[21565]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 18 11:30:08 managed-node2 python3.12[21697]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 18 11:30:09 managed-node2 python3.12[21829]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 
fact_path=/etc/ansible/facts.d Jan 18 11:30:11 managed-node2 python3.12[21989]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jan 18 11:30:11 managed-node2 python3.12[22120]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jan 18 11:30:16 managed-node2 python3.12[22251]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None Jan 18 11:30:16 managed-node2 python3.12[22383]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:30:17 managed-node2 python3.12[22516]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:30:17 managed-node2 python3.12[22648]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:30:18 managed-node2 python3.12[22780]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:30:19 managed-node2 python3.12[22912]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl enable-linger podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Jan 18 11:30:19 managed-node2 systemd[1]: Created slice user-3001.slice - User Slice of UID 3001. ░░ Subject: A start job for unit user-3001.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-3001.slice has finished successfully. ░░ ░░ The job identifier is 2038. Jan 18 11:30:19 managed-node2 systemd[1]: Starting user-runtime-dir@3001.service - User Runtime Directory /run/user/3001... ░░ Subject: A start job for unit user-runtime-dir@3001.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@3001.service has begun execution. ░░ ░░ The job identifier is 2037. Jan 18 11:30:19 managed-node2 systemd[1]: Finished user-runtime-dir@3001.service - User Runtime Directory /run/user/3001. ░░ Subject: A start job for unit user-runtime-dir@3001.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@3001.service has finished successfully. ░░ ░░ The job identifier is 2037. Jan 18 11:30:19 managed-node2 systemd[1]: Starting user@3001.service - User Manager for UID 3001... ░░ Subject: A start job for unit user@3001.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@3001.service has begun execution. ░░ ░░ The job identifier is 2117. 
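[Note] The loginctl enable-linger step is what allows podman_basic_user's rootless services to run without an open login session; it is idempotent in the play because the command task guards on creates=/var/lib/systemd/linger/podman_basic_user. The moving parts, run by hand:

    # Let the user's systemd instance run independently of login sessions
    loginctl enable-linger podman_basic_user

    # The flag file the creates= guard checks
    ls /var/lib/systemd/linger/podman_basic_user

    # The per-user manager that systemd then starts (UID 3001, as in the journal)
    systemctl status user@3001.service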
Jan 18 11:30:19 managed-node2 systemd-logind[658]: New session 6 of user podman_basic_user. ░░ Subject: A new session 6 has been created for user podman_basic_user ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 6 has been created for the user podman_basic_user. ░░ ░░ The leading process of the session is 22916. Jan 18 11:30:19 managed-node2 (systemd)[22916]: pam_unix(systemd-user:session): session opened for user podman_basic_user(uid=3001) by podman_basic_user(uid=0) Jan 18 11:30:19 managed-node2 systemd[22916]: Queued start job for default target default.target. Jan 18 11:30:19 managed-node2 systemd[22916]: Created slice app.slice - User Application Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 5. Jan 18 11:30:19 managed-node2 systemd[22916]: Started grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 10. Jan 18 11:30:19 managed-node2 systemd[22916]: Started systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 9. Jan 18 11:30:19 managed-node2 systemd[22916]: Reached target paths.target - Paths. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 11. Jan 18 11:30:19 managed-node2 systemd[22916]: Reached target timers.target - Timers. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 8. Jan 18 11:30:19 managed-node2 systemd[22916]: Starting dbus.socket - D-Bus User Message Bus Socket... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 4. Jan 18 11:30:19 managed-node2 systemd[22916]: Starting systemd-tmpfiles-setup.service - Create User Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 12. Jan 18 11:30:19 managed-node2 systemd[22916]: Finished systemd-tmpfiles-setup.service - Create User Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 12. Jan 18 11:30:19 managed-node2 systemd[22916]: Listening on dbus.socket - D-Bus User Message Bus Socket. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 4. Jan 18 11:30:19 managed-node2 systemd[22916]: Reached target sockets.target - Sockets. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 3. Jan 18 11:30:19 managed-node2 systemd[22916]: Reached target basic.target - Basic System. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 2. Jan 18 11:30:19 managed-node2 systemd[22916]: Reached target default.target - Main User Target. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 1. Jan 18 11:30:19 managed-node2 systemd[22916]: Startup finished in 62ms. ░░ Subject: User manager start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The user manager instance for user 3001 has been started. All services queued ░░ for starting have been started. Note that other services might still be starting ░░ up or be started at any later time. ░░ ░░ Startup of the manager took 62627 microseconds. Jan 18 11:30:19 managed-node2 systemd[1]: Started user@3001.service - User Manager for UID 3001. ░░ Subject: A start job for unit user@3001.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@3001.service has finished successfully. ░░ ░░ The job identifier is 2117. Jan 18 11:30:19 managed-node2 python3.12[23062]: ansible-file Invoked with path=/tmp/lsr_8do_wnm9_podman/httpd1 state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:30:20 managed-node2 python3.12[23193]: ansible-file Invoked with path=/tmp/lsr_8do_wnm9_podman/httpd1-create state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:30:20 managed-node2 sudo[23366]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-eibsoicfymjqrqrmmedgncnjbjufhoom ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217820.5461752-12864-100152971586486/AnsiballZ_podman_image.py' Jan 18 11:30:20 managed-node2 sudo[23366]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jan 18 11:30:21 managed-node2 systemd[22916]: Created slice session.slice - User Core Session Slice. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 14. Jan 18 11:30:21 managed-node2 systemd[22916]: Starting dbus-broker.service - D-Bus User Message Bus... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 13. Jan 18 11:30:21 managed-node2 dbus-broker-launch[23391]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored Jan 18 11:30:21 managed-node2 dbus-broker-launch[23391]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored Jan 18 11:30:21 managed-node2 systemd[22916]: Started dbus-broker.service - D-Bus User Message Bus. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 13. Jan 18 11:30:21 managed-node2 dbus-broker-launch[23391]: Ready Jan 18 11:30:21 managed-node2 systemd[22916]: Created slice user.slice - Slice /user. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 20. Jan 18 11:30:21 managed-node2 systemd[22916]: Started podman-23376.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 19. Jan 18 11:30:21 managed-node2 systemd[22916]: Started podman-pause-4e2d8a8c.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 23. Jan 18 11:30:21 managed-node2 systemd[22916]: Started podman-23394.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 27. Jan 18 11:30:22 managed-node2 systemd[22916]: Started podman-23419.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 31. 
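[Note] Rootless podman invocations show up as transient scopes (podman-23376.scope and friends) under the user manager, alongside the dbus-broker user bus started above. To list them from root, one option is to address the user's systemd instance directly; a sketch, assuming the runtime directory shown in the journal:

    # List podman's transient scopes under user 3001's systemd --user instance
    sudo -u podman_basic_user XDG_RUNTIME_DIR=/run/user/3001 \
        systemctl --user list-units --type=scope 'podman*'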
Jan 18 11:30:22 managed-node2 sudo[23366]: pam_unix(sudo:session): session closed for user podman_basic_user Jan 18 11:30:22 managed-node2 python3.12[23557]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:30:22 managed-node2 python3.12[23688]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d state=directory owner=podman_basic_user group=3001 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:30:23 managed-node2 python3.12[23819]: ansible-ansible.legacy.stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 18 11:30:23 managed-node2 python3.12[23924]: ansible-ansible.legacy.copy Invoked with dest=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml owner=podman_basic_user group=3001 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1737217823.061495-12952-136346022576279/.source.yml _original_basename=.s86ot0b8 follow=False checksum=ebcaacb95b0da6d147ab23a97880f92ccc2779b1 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:30:24 managed-node2 sudo[24097]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-iasyakceuldjmcluzgmadhodgooypgkm ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217823.8349788-12985-263720345862192/AnsiballZ_podman_play.py' Jan 18 11:30:24 managed-node2 sudo[24097]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jan 18 11:30:24 managed-node2 python3.12[24100]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jan 18 11:30:24 managed-node2 systemd[22916]: Started podman-24108.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 35. Jan 18 11:30:24 managed-node2 systemd[22916]: Created slice user-libpod_pod_ecc89624fd150903de3d8406f631d4530d0728560e903744f4aceb8883d57323.slice - cgroup user-libpod_pod_ecc89624fd150903de3d8406f631d4530d0728560e903744f4aceb8883d57323.slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 39. 
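[Note] This podman_play call runs with state=started and debug=True, which is why verbose level=debug podman output appears in the journal shortly afterwards. Run as the rootless user, the rough equivalent is:

    # Start the pod from the user's kube file with debug-level logging
    # (--log-level is podman's global verbosity flag)
    podman --log-level debug kube play \
        /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml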
Jan 18 11:30:24 managed-node2 kernel: tun: Universal TUN/TAP device driver, 1.6 Jan 18 11:30:24 managed-node2 systemd[22916]: Started rootless-netns-cdefcea5.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 43. Jan 18 11:30:24 managed-node2 kernel: bridge: filtering via arp/ip/ip6tables is no longer available by default. Update your scripts to load br_netfilter if you need this. Jan 18 11:30:24 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 18 11:30:24 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 18 11:30:24 managed-node2 kernel: veth0: entered allmulticast mode Jan 18 11:30:24 managed-node2 kernel: veth0: entered promiscuous mode Jan 18 11:30:24 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 18 11:30:24 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Jan 18 11:30:24 managed-node2 systemd[22916]: Started run-p24190-i24490.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/user/3001/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 47. Jan 18 11:30:24 managed-node2 aardvark-dns[24190]: starting aardvark on a child with pid 24191 Jan 18 11:30:24 managed-node2 aardvark-dns[24191]: Successfully parsed config Jan 18 11:30:24 managed-node2 aardvark-dns[24191]: Listen v4 ip {"podman-default-kube-network": [10.89.0.1]} Jan 18 11:30:24 managed-node2 aardvark-dns[24191]: Listen v6 ip {} Jan 18 11:30:24 managed-node2 aardvark-dns[24191]: Using the following upstream servers: [169.254.1.1:53, 10.29.169.13:53, 10.29.170.12:53] Jan 18 11:30:24 managed-node2 conmon[24206]: conmon 4e247833c025794e6d04 : failed to write to /proc/self/oom_score_adj: Permission denied Jan 18 11:30:24 managed-node2 systemd[22916]: Started libpod-conmon-4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 51. Jan 18 11:30:24 managed-node2 conmon[24208]: conmon 4e247833c025794e6d04 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/14/attach} Jan 18 11:30:24 managed-node2 conmon[24208]: conmon 4e247833c025794e6d04 : terminal_ctrl_fd: 14 Jan 18 11:30:24 managed-node2 conmon[24208]: conmon 4e247833c025794e6d04 : winsz read side: 17, winsz write side: 18 Jan 18 11:30:24 managed-node2 systemd[22916]: Started libpod-4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 56. 
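[Note] For the rootless pod, podman sets up a per-user network namespace (the rootless-netns-cdefcea5.scope above), a podman1 bridge with a veth pair, and aardvark-dns listening on 10.89.0.1:53 for the podman-default-kube-network. Two ways to inspect that plumbing as the rootless user, sketched:

    # Show the network definition aardvark-dns is serving
    podman network inspect podman-default-kube-network

    # Enter the rootless network namespace and look at the bridge
    podman unshare --rootless-netns ip addr show podman1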
Jan 18 11:30:24 managed-node2 conmon[24208]: conmon 4e247833c025794e6d04 : container PID: 24210 Jan 18 11:30:24 managed-node2 conmon[24208]: conmon 4e247833c025794e6d04 : container 24210 exited with status 127 Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e)" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=info msg="Using sqlite as database backend" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="systemd-logind: Unknown object '/'." Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Using graph driver overlay" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Using run root /run/user/3001/containers" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Using transient store: false" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Cached value indicated that overlay is supported" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Cached value indicated that overlay is supported" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Cached value indicated that metacopy is not being used" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Cached value indicated that native-diff is usable" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" Jan 18 11:30:24 managed-node2 
/usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Initializing event backend file" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=info msg="Setting parallel job count to 7" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Cleaning up container 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Tearing down network namespace at /run/user/3001/netns/netns-3db7a625-f37c-48f9-4131-62aa9e219beb for container 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network ffb9a78e5d96102e63d60f7c7ce353d5163cf289f06efb872ce4d70b43a3efc3 bridge podman1 2025-01-18 11:30:24.308994366 -0500 EST [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Successfully loaded 2 networks" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="The path of /etc/resolv.conf in 
the mount ns is \"/etc/resolv.conf\"" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=info msg="netavark: [DEBUG netavark::commands::teardown] Tearing down..\n" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=info msg="netavark: [INFO netavark::firewall] Using nftables firewall driver\n" Jan 18 11:30:24 managed-node2 aardvark-dns[24191]: Received SIGHUP Jan 18 11:30:24 managed-node2 aardvark-dns[24191]: Successfully parsed config Jan 18 11:30:24 managed-node2 aardvark-dns[24191]: Listen v4 ip {} Jan 18 11:30:24 managed-node2 aardvark-dns[24191]: Listen v6 ip {} Jan 18 11:30:24 managed-node2 aardvark-dns[24191]: No configuration found stopping the sever Jan 18 11:30:24 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 18 11:30:24 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Jan 18 11:30:24 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Jan 18 11:30:24 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=info msg="netavark: [INFO netavark::network::bridge] removing bridge podman1\n" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=info msg="netavark: [DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \"netavark\", chain: \"INPUT\", expr: [Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \"ip\", field: \"saddr\" }))), right: Named(Prefix(Prefix { addr: String(\"10.89.0.0\"), len: 24 })), op: EQ }), Match(Match { left: Named(Meta(Meta { key: L4proto })), right: Named(Set([Element(String(\"tcp\")), Element(String(\"udp\"))])), op: EQ }), Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \"th\", field: \"dport\" }))), right: Number(53), op: EQ }), Accept(None)], handle: Some(23), index: None, comment: None }\n[DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \"netavark\", chain: \"FORWARD\", expr: [Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \"ip\", field: \"daddr\" }))), right: Named(Prefix(Prefix { addr: String(\"10.89.0.0\"), len: 24 })), op: EQ }), Match(Match { left: Named(CT(CT { key: \"state\", family: None, dir: None })), right: List([String(\"established\"), String(\"related\")]), op: IN }), Accept(None)], handle: Some(24), index: None, comment: None }\n[DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \"netavark\", chain: \"FORWARD\", expr: [Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \"ip\", field: \"saddr\" }))), right: Named(Prefix(Prefix { addr: String(\"10.89.0.0\"), len: 24 })), op: EQ }), Accept(None)], handle: Some(25), index: None, comment: None }\n" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=info msg="netavark: [DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \"netavark\", chain: \"POSTROUTING\", expr: [Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \"ip\", field: \"saddr\" }))), right: Named(Prefix(Prefix { addr: String(\"10.89.0.0\"), len: 24 })), op: EQ }), Jump(JumpTarget { target: \"nv_ffb9a78e_10_89_0_0_nm24\" })], handle: Some(26), index: None, comment: None }\n[DEBUG netavark::firewall::nft] Removing 4 rules\n[DEBUG netavark::firewall::nft] Found chain nv_ffb9a78e_10_89_0_0_nm24\n" Jan 18 11:30:24 managed-node2 
/usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=info msg="netavark: [DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \"netavark\", chain: \"NETAVARK-ISOLATION-3\", expr: [Match(Match { left: Named(Meta(Meta { key: Oifname })), right: String(\"podman1\"), op: EQ }), Drop(None)], handle: Some(17), index: None, comment: None }\n[DEBUG netavark::firewall::nft] Removing 1 isolation rules for network\n" Jan 18 11:30:25 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:25-05:00" level=info msg="netavark: [DEBUG netavark::firewall::nft] Found chain nv_ffb9a78e_10_89_0_0_nm24_dnat\n[DEBUG netavark::firewall::nft] Found chain nv_ffb9a78e_10_89_0_0_nm24_dnat\n" Jan 18 11:30:25 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:25-05:00" level=info msg="netavark: [DEBUG netavark::commands::teardown] Teardown complete\n" Jan 18 11:30:25 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:25-05:00" level=debug msg="Cleaning up rootless network namespace" Jan 18 11:30:25 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:25-05:00" level=debug msg="Successfully cleaned up container 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e" Jan 18 11:30:25 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:25-05:00" level=debug msg="Unmounted container \"4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e\"" Jan 18 11:30:25 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:25-05:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e)" Jan 18 11:30:25 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:25-05:00" level=debug msg="Shutting down engines" Jan 18 11:30:25 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:25-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=24212 Jan 18 11:30:25 managed-node2 systemd[22916]: Stopping libpod-conmon-4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e.scope... ░░ Subject: A stop job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has begun execution. ░░ ░░ The job identifier is 62. Jan 18 11:30:25 managed-node2 systemd[22916]: Stopped libpod-conmon-4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e.scope. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 62 and the job result is done. Jan 18 11:30:25 managed-node2 systemd[22916]: Removed slice user-libpod_pod_ecc89624fd150903de3d8406f631d4530d0728560e903744f4aceb8883d57323.slice - cgroup user-libpod_pod_ecc89624fd150903de3d8406f631d4530d0728560e903744f4aceb8883d57323.slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 61 and the job result is done. 
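By this point the journal has recorded the infra container's process (PID 24210) exiting with status 127 (command not found) and the pod's cgroup slice being removed, which is what makes the subsequent podman play kube run fail. A minimal sketch of how one might confirm that exit code from a playbook follows; it is a hypothetical debugging step, not part of the role under test, and assumes the containers.podman.podman_container_info module and the ecc89624fd15-infra container name seen above:

- name: Inspect the stopped infra container as the rootless user
  become: true
  become_user: podman_basic_user
  containers.podman.podman_container_info:
    name: ecc89624fd15-infra  # infra container name from the journal above
  register: infra_info

- name: Report the exit status the journal shows as 127
  ansible.builtin.debug:
    msg: "infra exited with {{ infra_info.containers[0].State.ExitCode }}"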
Jan 18 11:30:25 managed-node2 python3.12[24100]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml Jan 18 11:30:25 managed-node2 python3.12[24100]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: [starting container 1cf4dd0f43f7016920e29f1ab08c695a05f6469ead495caa9300d983f7605678: cannot get namespace path unless container 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e is running: container is stopped] Pod: ecc89624fd150903de3d8406f631d4530d0728560e903744f4aceb8883d57323 Container: 1cf4dd0f43f7016920e29f1ab08c695a05f6469ead495caa9300d983f7605678 Jan 18 11:30:25 managed-node2 python3.12[24100]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2025-01-18T11:30:24-05:00" level=info msg="/bin/podman filtering at log level debug" time="2025-01-18T11:30:24-05:00" level=debug msg="Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2025-01-18T11:30:24-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2025-01-18T11:30:24-05:00" level=info msg="Using sqlite as database backend" time="2025-01-18T11:30:24-05:00" level=debug msg="systemd-logind: Unknown object '/'." time="2025-01-18T11:30:24-05:00" level=debug msg="Using graph driver overlay" time="2025-01-18T11:30:24-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" time="2025-01-18T11:30:24-05:00" level=debug msg="Using run root /run/user/3001/containers" time="2025-01-18T11:30:24-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" time="2025-01-18T11:30:24-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" time="2025-01-18T11:30:24-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" time="2025-01-18T11:30:24-05:00" level=debug msg="Using transient store: false" time="2025-01-18T11:30:24-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-01-18T11:30:24-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-01-18T11:30:24-05:00" level=debug msg="Cached value indicated that metacopy is not being used" time="2025-01-18T11:30:24-05:00" level=debug msg="Cached value indicated that native-diff is usable" time="2025-01-18T11:30:24-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" time="2025-01-18T11:30:24-05:00" level=debug msg="Initializing event backend file" time="2025-01-18T11:30:24-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" time="2025-01-18T11:30:24-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2025-01-18T11:30:24-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2025-01-18T11:30:24-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" 
time="2025-01-18T11:30:24-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2025-01-18T11:30:24-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2025-01-18T11:30:24-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2025-01-18T11:30:24-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2025-01-18T11:30:24-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2025-01-18T11:30:24-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" time="2025-01-18T11:30:24-05:00" level=info msg="Setting parallel job count to 7" time="2025-01-18T11:30:24-05:00" level=debug msg="Successfully loaded 1 networks" time="2025-01-18T11:30:24-05:00" level=debug msg="found free device name podman1" time="2025-01-18T11:30:24-05:00" level=debug msg="found free ipv4 network subnet 10.89.0.0/24" time="2025-01-18T11:30:24-05:00" level=debug msg="Looking up image \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2025-01-18T11:30:24-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-18T11:30:24-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." time="2025-01-18T11:30:24-05:00" level=debug msg="reference \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\" does not resolve to an image ID" time="2025-01-18T11:30:24-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." time="2025-01-18T11:30:24-05:00" level=debug msg="reference \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\" does not resolve to an image ID" time="2025-01-18T11:30:24-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." 
time="2025-01-18T11:30:24-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-18T11:30:24-05:00" level=debug msg="FROM \"scratch\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Cached value indicated that idmapped mounts for overlay are not supported" time="2025-01-18T11:30:24-05:00" level=debug msg="Check for idmapped mounts support " time="2025-01-18T11:30:24-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-18T11:30:24-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-18T11:30:24-05:00" level=debug msg="overlay: test mount indicated that volatile is being used" time="2025-01-18T11:30:24-05:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/9d8d459e4ef4fa8a17eed866c3dda4831e19f4d8c40ddcf2b3f408f85aff9830/empty,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/9d8d459e4ef4fa8a17eed866c3dda4831e19f4d8c40ddcf2b3f408f85aff9830/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/9d8d459e4ef4fa8a17eed866c3dda4831e19f4d8c40ddcf2b3f408f85aff9830/work,userxattr,volatile,context=\"system_u:object_r:container_file_t:s0:c225,c921\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Container ID: 2d2b27d2d138efcf3ea7ce9b87b8bd1e0f28d2dca6ad51fdd5cb4bcf6a09639d" time="2025-01-18T11:30:24-05:00" level=debug msg="Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:copy Args:[/usr/libexec/podman/catatonit /catatonit] Flags:[] Attrs:map[] Message:COPY /usr/libexec/podman/catatonit /catatonit Heredocs:[] Original:COPY /usr/libexec/podman/catatonit /catatonit}" time="2025-01-18T11:30:24-05:00" level=debug msg="COPY []string(nil), imagebuilder.Copy{FromFS:false, From:\"\", Src:[]string{\"/usr/libexec/podman/catatonit\"}, Dest:\"/catatonit\", Download:false, Chown:\"\", Chmod:\"\", Checksum:\"\", Files:[]imagebuilder.File(nil), KeepGitDir:false, Link:false, Parents:false, Excludes:[]string(nil)}" time="2025-01-18T11:30:24-05:00" level=debug msg="EnsureContainerPath \"/\" (owner \"\", mode 0) in \"2d2b27d2d138efcf3ea7ce9b87b8bd1e0f28d2dca6ad51fdd5cb4bcf6a09639d\"" time="2025-01-18T11:30:24-05:00" level=debug msg="added content file:872045bc026632654869cebc14f91fe531969760bf26925e889d9b70527c8b67" time="2025-01-18T11:30:24-05:00" level=debug msg="Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:entrypoint Args:[/catatonit -P] Flags:[] Attrs:map[json:true] Message:ENTRYPOINT /catatonit -P Heredocs:[] Original:ENTRYPOINT [\"/catatonit\", \"-P\"]}" time="2025-01-18T11:30:24-05:00" level=debug msg="EnsureContainerPath \"/\" (owner \"\", mode 0) in \"2d2b27d2d138efcf3ea7ce9b87b8bd1e0f28d2dca6ad51fdd5cb4bcf6a09639d\"" time="2025-01-18T11:30:24-05:00" level=debug msg="COMMIT localhost/podman-pause:5.3.1-1733097600" time="2025-01-18T11:30:24-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\"" time="2025-01-18T11:30:24-05:00" level=debug msg="COMMIT \"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\"" time="2025-01-18T11:30:24-05:00" level=debug msg="committing image with reference 
\"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\" is allowed by policy" time="2025-01-18T11:30:24-05:00" level=debug msg="layer list: [\"9d8d459e4ef4fa8a17eed866c3dda4831e19f4d8c40ddcf2b3f408f85aff9830\"]" time="2025-01-18T11:30:24-05:00" level=debug msg="using \"/var/tmp/buildah1992200273\" to hold temporary data" time="2025-01-18T11:30:24-05:00" level=debug msg="Tar with options on /home/podman_basic_user/.local/share/containers/storage/overlay/9d8d459e4ef4fa8a17eed866c3dda4831e19f4d8c40ddcf2b3f408f85aff9830/diff" time="2025-01-18T11:30:24-05:00" level=debug msg="layer \"9d8d459e4ef4fa8a17eed866c3dda4831e19f4d8c40ddcf2b3f408f85aff9830\" size is 699392 bytes, uncompressed digest sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6, possibly-compressed digest sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6" time="2025-01-18T11:30:24-05:00" level=debug msg="OCIv1 config = {\"created\":\"2025-01-18T16:30:24.477854787Z\",\"architecture\":\"amd64\",\"os\":\"linux\",\"config\":{\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Entrypoint\":[\"/catatonit\",\"-P\"],\"WorkingDir\":\"/\",\"Labels\":{\"io.buildah.version\":\"1.38.0\"}},\"rootfs\":{\"type\":\"layers\",\"diff_ids\":[\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"]},\"history\":[{\"created\":\"2025-01-18T16:30:24.450709884Z\",\"created_by\":\"/bin/sh -c #(nop) COPY file:872045bc026632654869cebc14f91fe531969760bf26925e889d9b70527c8b67 in /catatonit \",\"empty_layer\":true},{\"created\":\"2025-01-18T16:30:24.481026717Z\",\"created_by\":\"/bin/sh -c #(nop) ENTRYPOINT [\\\"/catatonit\\\", \\\"-P\\\"]\"}]}" time="2025-01-18T11:30:24-05:00" level=debug msg="OCIv1 manifest = {\"schemaVersion\":2,\"mediaType\":\"application/vnd.oci.image.manifest.v1+json\",\"config\":{\"mediaType\":\"application/vnd.oci.image.config.v1+json\",\"digest\":\"sha256:74da33da3b290d57b5e3cd4f0642c04e3a9aad2dd6eb9733a8edc163fcd3b747\",\"size\":685},\"layers\":[{\"mediaType\":\"application/vnd.oci.image.layer.v1.tar\",\"digest\":\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\",\"size\":699392}],\"annotations\":{\"org.opencontainers.image.base.digest\":\"\",\"org.opencontainers.image.base.name\":\"\"}}" time="2025-01-18T11:30:24-05:00" level=debug msg="Docker v2s2 config = 
{\"created\":\"2025-01-18T16:30:24.477854787Z\",\"container\":\"2d2b27d2d138efcf3ea7ce9b87b8bd1e0f28d2dca6ad51fdd5cb4bcf6a09639d\",\"container_config\":{\"Hostname\":\"\",\"Domainname\":\"\",\"User\":\"\",\"AttachStdin\":false,\"AttachStdout\":false,\"AttachStderr\":false,\"Tty\":false,\"OpenStdin\":false,\"StdinOnce\":false,\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Cmd\":null,\"Image\":\"\",\"Volumes\":{},\"WorkingDir\":\"/\",\"Entrypoint\":[\"/catatonit\",\"-P\"],\"OnBuild\":[],\"Labels\":{\"io.buildah.version\":\"1.38.0\"}},\"config\":{\"Hostname\":\"\",\"Domainname\":\"\",\"User\":\"\",\"AttachStdin\":false,\"AttachStdout\":false,\"AttachStderr\":false,\"Tty\":false,\"OpenStdin\":false,\"StdinOnce\":false,\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Cmd\":null,\"Image\":\"\",\"Volumes\":{},\"WorkingDir\":\"/\",\"Entrypoint\":[\"/catatonit\",\"-P\"],\"OnBuild\":[],\"Labels\":{\"io.buildah.version\":\"1.38.0\"}},\"architecture\":\"amd64\",\"os\":\"linux\",\"rootfs\":{\"type\":\"layers\",\"diff_ids\":[\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"]},\"history\":[{\"created\":\"2025-01-18T16:30:24.450709884Z\",\"created_by\":\"/bin/sh -c #(nop) COPY file:872045bc026632654869cebc14f91fe531969760bf26925e889d9b70527c8b67 in /catatonit \",\"empty_layer\":true},{\"created\":\"2025-01-18T16:30:24.481026717Z\",\"created_by\":\"/bin/sh -c #(nop) ENTRYPOINT [\\\"/catatonit\\\", \\\"-P\\\"]\"}]}" time="2025-01-18T11:30:24-05:00" level=debug msg="Docker v2s2 manifest = {\"schemaVersion\":2,\"mediaType\":\"application/vnd.docker.distribution.manifest.v2+json\",\"config\":{\"mediaType\":\"application/vnd.docker.container.image.v1+json\",\"size\":1348,\"digest\":\"sha256:26d2711eba2df3c4227ee8881a1c0b67f1575d8e7862baece5db34e1fa692beb\"},\"layers\":[{\"mediaType\":\"application/vnd.docker.image.rootfs.diff.tar\",\"size\":699392,\"digest\":\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"}]}" time="2025-01-18T11:30:24-05:00" level=debug msg="Using SQLite blob info cache at /home/podman_basic_user/.local/share/containers/cache/blob-info-cache-v1.sqlite" time="2025-01-18T11:30:24-05:00" level=debug msg="IsRunningImageAllowed for image containers-storage:" time="2025-01-18T11:30:24-05:00" level=debug msg=" Using transport \"containers-storage\" policy section \"\"" time="2025-01-18T11:30:24-05:00" level=debug msg=" Requirement 0: allowed" time="2025-01-18T11:30:24-05:00" level=debug msg="Overall: allowed" time="2025-01-18T11:30:24-05:00" level=debug msg="start reading config" time="2025-01-18T11:30:24-05:00" level=debug msg="finished reading config" time="2025-01-18T11:30:24-05:00" level=debug msg="Manifest has MIME type application/vnd.oci.image.manifest.v1+json, ordered candidate list [application/vnd.oci.image.manifest.v1+json, application/vnd.docker.distribution.manifest.v2+json, application/vnd.docker.distribution.manifest.v1+prettyjws, application/vnd.docker.distribution.manifest.v1+json]" time="2025-01-18T11:30:24-05:00" level=debug msg="... 
will first try using the original manifest unmodified" time="2025-01-18T11:30:24-05:00" level=debug msg="Checking if we can reuse blob sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6: general substitution = true, compression for MIME type \"application/vnd.oci.image.layer.v1.tar\" = true" time="2025-01-18T11:30:24-05:00" level=debug msg="reading layer \"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"" time="2025-01-18T11:30:24-05:00" level=debug msg="No compression detected" time="2025-01-18T11:30:24-05:00" level=debug msg="Using original blob without modification" time="2025-01-18T11:30:24-05:00" level=debug msg="Applying tar in /home/podman_basic_user/.local/share/containers/storage/overlay/83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6/diff" time="2025-01-18T11:30:24-05:00" level=debug msg="finished reading layer \"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"" time="2025-01-18T11:30:24-05:00" level=debug msg="No compression detected" time="2025-01-18T11:30:24-05:00" level=debug msg="Compression change for blob sha256:74da33da3b290d57b5e3cd4f0642c04e3a9aad2dd6eb9733a8edc163fcd3b747 (\"application/vnd.oci.image.config.v1+json\") not supported" time="2025-01-18T11:30:24-05:00" level=debug msg="Using original blob without modification" time="2025-01-18T11:30:24-05:00" level=debug msg="setting image creation date to 2025-01-18 16:30:24.477854787 +0000 UTC" time="2025-01-18T11:30:24-05:00" level=debug msg="created new image ID \"74da33da3b290d57b5e3cd4f0642c04e3a9aad2dd6eb9733a8edc163fcd3b747\" with metadata \"{}\"" time="2025-01-18T11:30:24-05:00" level=debug msg="added name \"localhost/podman-pause:5.3.1-1733097600\" to image \"74da33da3b290d57b5e3cd4f0642c04e3a9aad2dd6eb9733a8edc163fcd3b747\"" time="2025-01-18T11:30:24-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\"" time="2025-01-18T11:30:24-05:00" level=debug msg="printing final image id \"74da33da3b290d57b5e3cd4f0642c04e3a9aad2dd6eb9733a8edc163fcd3b747\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Pod using bridge network mode" time="2025-01-18T11:30:24-05:00" level=debug msg="Created cgroup path user.slice/user-libpod_pod_ecc89624fd150903de3d8406f631d4530d0728560e903744f4aceb8883d57323.slice for parent user.slice and name libpod_pod_ecc89624fd150903de3d8406f631d4530d0728560e903744f4aceb8883d57323" time="2025-01-18T11:30:24-05:00" level=debug msg="Created cgroup user.slice/user-libpod_pod_ecc89624fd150903de3d8406f631d4530d0728560e903744f4aceb8883d57323.slice" time="2025-01-18T11:30:24-05:00" level=debug msg="Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_ecc89624fd150903de3d8406f631d4530d0728560e903744f4aceb8883d57323.slice" time="2025-01-18T11:30:24-05:00" level=debug msg="Looking up image \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2025-01-18T11:30:24-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-18T11:30:24-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." 
time="2025-01-18T11:30:24-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@74da33da3b290d57b5e3cd4f0642c04e3a9aad2dd6eb9733a8edc163fcd3b747\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2025-01-18T11:30:24-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@74da33da3b290d57b5e3cd4f0642c04e3a9aad2dd6eb9733a8edc163fcd3b747)" time="2025-01-18T11:30:24-05:00" level=debug msg="exporting opaque data as blob \"sha256:74da33da3b290d57b5e3cd4f0642c04e3a9aad2dd6eb9733a8edc163fcd3b747\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Inspecting image 74da33da3b290d57b5e3cd4f0642c04e3a9aad2dd6eb9733a8edc163fcd3b747" time="2025-01-18T11:30:24-05:00" level=debug msg="exporting opaque data as blob \"sha256:74da33da3b290d57b5e3cd4f0642c04e3a9aad2dd6eb9733a8edc163fcd3b747\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Inspecting image 74da33da3b290d57b5e3cd4f0642c04e3a9aad2dd6eb9733a8edc163fcd3b747" time="2025-01-18T11:30:24-05:00" level=debug msg="Inspecting image 74da33da3b290d57b5e3cd4f0642c04e3a9aad2dd6eb9733a8edc163fcd3b747" time="2025-01-18T11:30:24-05:00" level=debug msg="using systemd mode: false" time="2025-01-18T11:30:24-05:00" level=debug msg="setting container name ecc89624fd15-infra" time="2025-01-18T11:30:24-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network ffb9a78e5d96102e63d60f7c7ce353d5163cf289f06efb872ce4d70b43a3efc3 bridge podman1 2025-01-18 11:30:24.308994366 -0500 EST [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2025-01-18T11:30:24-05:00" level=debug msg="Successfully loaded 2 networks" time="2025-01-18T11:30:24-05:00" level=debug msg="Allocated lock 1 for container 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e" time="2025-01-18T11:30:24-05:00" level=debug msg="exporting opaque data as blob \"sha256:74da33da3b290d57b5e3cd4f0642c04e3a9aad2dd6eb9733a8edc163fcd3b747\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Created container \"4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Container \"4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e\" has work directory \"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e/userdata\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Container \"4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e\" has run directory \"/run/user/3001/containers/overlay-containers/4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e/userdata\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-18T11:30:24-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-18T11:30:24-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2025-01-18T11:30:24-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-18T11:30:24-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-18T11:30:24-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Pulling image quay.io/libpod/testimage:20210610 (policy: missing)" time="2025-01-18T11:30:24-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-18T11:30:24-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-18T11:30:24-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-01-18T11:30:24-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-18T11:30:24-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-18T11:30:24-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-18T11:30:24-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-18T11:30:24-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2025-01-18T11:30:24-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-18T11:30:24-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-18T11:30:24-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-18T11:30:24-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-18T11:30:24-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-18T11:30:24-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-18T11:30:24-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-01-18T11:30:24-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-18T11:30:24-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-18T11:30:24-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-18T11:30:24-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-18T11:30:24-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-18T11:30:24-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-18T11:30:24-05:00" level=debug msg="using systemd mode: false" time="2025-01-18T11:30:24-05:00" level=debug msg="adding container to pod httpd1" time="2025-01-18T11:30:24-05:00" level=debug msg="setting container name httpd1-httpd1" 
time="2025-01-18T11:30:24-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-01-18T11:30:24-05:00" level=info msg="Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host" time="2025-01-18T11:30:24-05:00" level=debug msg="Adding mount /proc" time="2025-01-18T11:30:24-05:00" level=debug msg="Adding mount /dev" time="2025-01-18T11:30:24-05:00" level=debug msg="Adding mount /dev/pts" time="2025-01-18T11:30:24-05:00" level=debug msg="Adding mount /dev/mqueue" time="2025-01-18T11:30:24-05:00" level=debug msg="Adding mount /sys" time="2025-01-18T11:30:24-05:00" level=debug msg="Adding mount /sys/fs/cgroup" time="2025-01-18T11:30:24-05:00" level=debug msg="Allocated lock 2 for container 1cf4dd0f43f7016920e29f1ab08c695a05f6469ead495caa9300d983f7605678" time="2025-01-18T11:30:24-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Created container \"1cf4dd0f43f7016920e29f1ab08c695a05f6469ead495caa9300d983f7605678\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Container \"1cf4dd0f43f7016920e29f1ab08c695a05f6469ead495caa9300d983f7605678\" has work directory \"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/1cf4dd0f43f7016920e29f1ab08c695a05f6469ead495caa9300d983f7605678/userdata\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Container \"1cf4dd0f43f7016920e29f1ab08c695a05f6469ead495caa9300d983f7605678\" has run directory \"/run/user/3001/containers/overlay-containers/1cf4dd0f43f7016920e29f1ab08c695a05f6469ead495caa9300d983f7605678/userdata\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Strongconnecting node 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e" time="2025-01-18T11:30:24-05:00" level=debug msg="Pushed 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e onto stack" time="2025-01-18T11:30:24-05:00" level=debug msg="Finishing node 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e. Popped 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e off stack" time="2025-01-18T11:30:24-05:00" level=debug msg="Strongconnecting node 1cf4dd0f43f7016920e29f1ab08c695a05f6469ead495caa9300d983f7605678" time="2025-01-18T11:30:24-05:00" level=debug msg="Pushed 1cf4dd0f43f7016920e29f1ab08c695a05f6469ead495caa9300d983f7605678 onto stack" time="2025-01-18T11:30:24-05:00" level=debug msg="Finishing node 1cf4dd0f43f7016920e29f1ab08c695a05f6469ead495caa9300d983f7605678. 
Popped 1cf4dd0f43f7016920e29f1ab08c695a05f6469ead495caa9300d983f7605678 off stack" time="2025-01-18T11:30:24-05:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/JSDQ6XK5RFVFINHXICJFEDNKJ6,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/f38eb63b5255bea370660ade2382dd866b105a6f7d6867fc251c291371ba4a58/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/f38eb63b5255bea370660ade2382dd866b105a6f7d6867fc251c291371ba4a58/work,userxattr,context=\"system_u:object_r:container_file_t:s0:c354,c759\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Made network namespace at /run/user/3001/netns/netns-3db7a625-f37c-48f9-4131-62aa9e219beb for container 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e" time="2025-01-18T11:30:24-05:00" level=debug msg="Mounted container \"4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e\" at \"/home/podman_basic_user/.local/share/containers/storage/overlay/f38eb63b5255bea370660ade2382dd866b105a6f7d6867fc251c291371ba4a58/merged\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Created root filesystem for container 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e at /home/podman_basic_user/.local/share/containers/storage/overlay/f38eb63b5255bea370660ade2382dd866b105a6f7d6867fc251c291371ba4a58/merged" time="2025-01-18T11:30:24-05:00" level=debug msg="Creating rootless network namespace at \"/run/user/3001/containers/networks/rootless-netns/rootless-netns\"" time="2025-01-18T11:30:24-05:00" level=debug msg="pasta arguments: --config-net --pid /run/user/3001/containers/networks/rootless-netns/rootless-netns-conn.pid --dns-forward 169.254.1.1 -t none -u none -T none -U none --no-map-gw --quiet --netns /run/user/3001/containers/networks/rootless-netns/rootless-netns --map-guest-addr 169.254.1.2" time="2025-01-18T11:30:24-05:00" level=debug msg="The path of /etc/resolv.conf in the mount ns is \"/etc/resolv.conf\"" [DEBUG netavark::network::validation] Validating network namespace... [DEBUG netavark::commands::setup] Setting up... 
[INFO netavark::firewall] Using nftables firewall driver [DEBUG netavark::network::bridge] Setup network podman-default-kube-network [DEBUG netavark::network::bridge] Container interface name: eth0 with IP addresses [10.89.0.2/24] [DEBUG netavark::network::bridge] Bridge name: podman1 with IP addresses [10.89.0.1/24] [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.ip_forward to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/podman1/rp_filter to 2 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv6/conf/eth0/autoconf to 0 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/arp_notify to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/rp_filter to 2 [INFO netavark::network::netlink] Adding route (dest: 0.0.0.0/0 ,gw: 10.89.0.1, metric 100) [INFO netavark::firewall::nft] Creating container chain nv_ffb9a78e_10_89_0_0_nm24 [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.conf.podman1.route_localnet to 1 [DEBUG netavark::dns::aardvark] Spawning aardvark server [DEBUG netavark::dns::aardvark] start aardvark-dns: ["systemd-run", "-q", "--scope", "--user", "/usr/libexec/podman/aardvark-dns", "--config", "/run/user/3001/containers/networks/aardvark-dns", "-p", "53", "run"] [DEBUG netavark::commands::setup] { "podman-default-kube-network": StatusBlock { dns_search_domains: Some( [ "dns.podman", ], ), dns_server_ips: Some( [ 10.89.0.1, ], ), interfaces: Some( { "eth0": NetInterface { mac_address: "4a:66:31:f9:f1:6c", subnets: Some( [ NetAddress { gateway: Some( 10.89.0.1, ), ipnet: 10.89.0.2/24, }, ], ), }, }, ), }, } [DEBUG netavark::commands::setup] Setup complete time="2025-01-18T11:30:24-05:00" level=debug msg="rootlessport: time=\"2025-01-18T11:30:24-05:00\" level=info msg=\"Starting parent driver\"\n" time="2025-01-18T11:30:24-05:00" level=debug msg="rootlessport: time=\"2025-01-18T11:30:24-05:00\" level=info msg=\"opaque=map[builtin.readypipepath:/run/user/3001/libpod/tmp/rootlessport3056496677/.bp-ready.pipe builtin.socketpath:/run/user/3001/libpod/tmp/rootlessport3056496677/.bp.sock]\"\ntime=\"2025-01-18T11:30:24-05:00\" level=info msg=\"Starting child driver in child netns (\\\"/proc/self/exe\\\" [rootlessport-child])\"\n" time="2025-01-18T11:30:24-05:00" level=debug msg="rootlessport: time=\"2025-01-18T11:30:24-05:00\" level=info msg=\"Waiting for initComplete\"\n" time="2025-01-18T11:30:24-05:00" level=debug msg="rootlessport: time=\"2025-01-18T11:30:24-05:00\" level=info msg=\"initComplete is closed; parent and child established the communication channel\"\n" time="2025-01-18T11:30:24-05:00" level=debug msg="rootlessport: time=\"2025-01-18T11:30:24-05:00\" level=info msg=\"Exposing ports [{ 80 15001 1 tcp}]\"\n" time="2025-01-18T11:30:24-05:00" level=debug msg="rootlessport: time=\"2025-01-18T11:30:24-05:00\" level=info msg=Ready\n" time="2025-01-18T11:30:24-05:00" level=debug msg="rootlessport is ready" time="2025-01-18T11:30:24-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2025-01-18T11:30:24-05:00" level=debug msg="Setting Cgroups for container 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e to user-libpod_pod_ecc89624fd150903de3d8406f631d4530d0728560e903744f4aceb8883d57323.slice:libpod:4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e" time="2025-01-18T11:30:24-05:00" level=debug 
msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-01-18T11:30:24-05:00" level=debug msg="Workdir \"/\" resolved to host path \"/home/podman_basic_user/.local/share/containers/storage/overlay/f38eb63b5255bea370660ade2382dd866b105a6f7d6867fc251c291371ba4a58/merged\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Created OCI spec for container 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e/userdata/config.json" time="2025-01-18T11:30:24-05:00" level=debug msg="Created cgroup path user.slice/user-libpod_pod_ecc89624fd150903de3d8406f631d4530d0728560e903744f4aceb8883d57323.slice for parent user.slice and name libpod_pod_ecc89624fd150903de3d8406f631d4530d0728560e903744f4aceb8883d57323" time="2025-01-18T11:30:24-05:00" level=debug msg="Created cgroup user.slice/user-libpod_pod_ecc89624fd150903de3d8406f631d4530d0728560e903744f4aceb8883d57323.slice" time="2025-01-18T11:30:24-05:00" level=debug msg="Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_ecc89624fd150903de3d8406f631d4530d0728560e903744f4aceb8883d57323.slice" time="2025-01-18T11:30:24-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2025-01-18T11:30:24-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e -u 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e -r /usr/bin/crun -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e/userdata -p /run/user/3001/containers/overlay-containers/4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e/userdata/pidfile -n ecc89624fd15-infra --exit-dir /run/user/3001/libpod/tmp/exits --persist-dir /run/user/3001/libpod/tmp/persist/4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e --full-attach -s -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e]" 
time="2025-01-18T11:30:24-05:00" level=info msg="Running conmon under slice user-libpod_pod_ecc89624fd150903de3d8406f631d4530d0728560e903744f4aceb8883d57323.slice and unitName libpod-conmon-4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e.scope" [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied time="2025-01-18T11:30:24-05:00" level=debug msg="Received: 24210" time="2025-01-18T11:30:24-05:00" level=info msg="Got Conmon PID as 24208" time="2025-01-18T11:30:24-05:00" level=debug msg="Created container 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e in OCI runtime" time="2025-01-18T11:30:24-05:00" level=debug msg="Adding nameserver(s) from network status of '[\"10.89.0.1\"]'" time="2025-01-18T11:30:24-05:00" level=debug msg="Adding search domain(s) from network status of '[\"dns.podman\"]'" time="2025-01-18T11:30:24-05:00" level=debug msg="Starting container 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e with command [/catatonit -P]" time="2025-01-18T11:30:24-05:00" level=debug msg="Started container 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e" time="2025-01-18T11:30:24-05:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/ACFCPDR6S36YG2XL23UOZJXHEP,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/1023185ec6d6a282b985a27cae520d0884d7318770b37051a554f91cc10afa0c/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/1023185ec6d6a282b985a27cae520d0884d7318770b37051a554f91cc10afa0c/work,userxattr,context=\"system_u:object_r:container_file_t:s0:c354,c759\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Mounted container \"1cf4dd0f43f7016920e29f1ab08c695a05f6469ead495caa9300d983f7605678\" at \"/home/podman_basic_user/.local/share/containers/storage/overlay/1023185ec6d6a282b985a27cae520d0884d7318770b37051a554f91cc10afa0c/merged\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Created root filesystem for container 1cf4dd0f43f7016920e29f1ab08c695a05f6469ead495caa9300d983f7605678 at /home/podman_basic_user/.local/share/containers/storage/overlay/1023185ec6d6a282b985a27cae520d0884d7318770b37051a554f91cc10afa0c/merged" time="2025-01-18T11:30:24-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2025-01-18T11:30:24-05:00" level=debug msg="Cleaning up container 1cf4dd0f43f7016920e29f1ab08c695a05f6469ead495caa9300d983f7605678" time="2025-01-18T11:30:24-05:00" level=debug msg="Unmounted container \"1cf4dd0f43f7016920e29f1ab08c695a05f6469ead495caa9300d983f7605678\"" starting container 1cf4dd0f43f7016920e29f1ab08c695a05f6469ead495caa9300d983f7605678: cannot get namespace path unless container 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e is running: container is stopped Error: failed to start 1 containers time="2025-01-18T11:30:24-05:00" level=debug msg="Shutting down engines" time="2025-01-18T11:30:24-05:00" level=info msg="Received shutdown.Stop(), terminating!" 
PID=24108
time="2025-01-18T11:30:24-05:00" level=debug msg="Adding parallel job to stop container 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e"
time="2025-01-18T11:30:24-05:00" level=debug msg="Adding parallel job to stop container 1cf4dd0f43f7016920e29f1ab08c695a05f6469ead495caa9300d983f7605678"
time="2025-01-18T11:30:24-05:00" level=debug msg="Stopping ctr 1cf4dd0f43f7016920e29f1ab08c695a05f6469ead495caa9300d983f7605678 (timeout 10)"
time="2025-01-18T11:30:25-05:00" level=debug msg="Stopping ctr 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e (timeout 10)"
time="2025-01-18T11:30:25-05:00" level=debug msg="Removing pod cgroup user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_ecc89624fd150903de3d8406f631d4530d0728560e903744f4aceb8883d57323.slice"
Jan 18 11:30:25 managed-node2 python3.12[24100]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 125
Jan 18 11:30:25 managed-node2 sudo[24097]: pam_unix(sudo:session): session closed for user podman_basic_user
Jan 18 11:30:25 managed-node2 python3.12[24360]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:30:26 managed-node2 python3.12[24492]: ansible-file Invoked with path=/etc/containers/storage.conf state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:30:26 managed-node2 python3.12[24623]: ansible-file Invoked with path=/tmp/lsr_8do_wnm9_podman state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:30:28 managed-node2 python3.12[24797]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Jan 18 11:30:29 managed-node2 python3.12[24957]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 18 11:30:30 managed-node2 python3.12[25088]: ansible-ansible.legacy.dnf Invoked with name=['python3-pyasn1', 'python3-cryptography', 'python3-dbus'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Jan 18 11:30:32 managed-node2 python3.12[25231]: ansible-ansible.legacy.dnf Invoked with name=['certmonger', 'python3-packaging'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Jan 18 11:30:33 managed-node2 dbus-broker-launch[632]: Noticed file-system modification, trigger reload.
░░ Subject: A configuration directory was written to
░░ Defined-By: dbus-broker
░░ Support: https://groups.google.com/forum/#!forum/bus1-devel
░░
░░ A write was detected to one of the directories containing D-Bus configuration
░░ files, triggering a configuration reload.
░░
░░ This functionality exists for backwards compatibility to pick up changes to
░░ D-Bus configuration without an explicit reload request. Typically when
░░ installing or removing third-party software causes D-Bus configuration files
░░ to be added or removed.
░░
░░ It is worth noting that this may cause partial configuration to be loaded in
░░ case dispatching this notification races with the writing of the configuration
░░ files. However, a future notification will then cause the configuration to be
░░ reloaded again.
Jan 18 11:30:33 managed-node2 dbus-broker-launch[23391]: Noticed file-system modification, trigger reload. [identical catalog explanation omitted]
Jan 18 11:30:33 managed-node2 dbus-broker-launch[632]: Noticed file-system modification, trigger reload. [identical catalog explanation omitted]
Jan 18 11:30:33 managed-node2 dbus-broker-launch[23391]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored
Jan 18 11:30:33 managed-node2 dbus-broker-launch[23391]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored
Jan 18 11:30:33 managed-node2 dbus-broker-launch[632]: Noticed file-system modification, trigger reload. [identical catalog explanation omitted]
Jan 18 11:30:33 managed-node2 dbus-broker-launch[23391]: Noticed file-system modification, trigger reload. [identical catalog explanation omitted]
Jan 18 11:30:33 managed-node2 dbus-broker-launch[23391]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored
Jan 18 11:30:33 managed-node2 dbus-broker-launch[23391]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored
Jan 18 11:30:33 managed-node2 systemd[1]: Reload requested from client PID 25250 ('systemctl') (unit session-5.scope)...
Jan 18 11:30:33 managed-node2 systemd[1]: Reloading...
Jan 18 11:30:34 managed-node2 systemd-rc-local-generator[25296]: /etc/rc.d/rc.local is not marked executable, skipping.
Jan 18 11:30:34 managed-node2 systemd-ssh-generator[25298]: Failed to query local AF_VSOCK CID: Permission denied
Jan 18 11:30:34 managed-node2 (sd-exec-[25271]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1.
Jan 18 11:30:34 managed-node2 systemd[1]: Reloading finished in 209 ms.
Jan 18 11:30:34 managed-node2 systemd[1]: Started run-p25310-i25610.service - [systemd-run] /usr/bin/systemctl start man-db-cache-update.
░░ Subject: A start job for unit run-p25310-i25610.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit run-p25310-i25610.service has finished successfully.
░░
░░ The job identifier is 2202.
Jan 18 11:30:34 managed-node2 systemd[1]: Starting man-db-cache-update.service...
░░ Subject: A start job for unit man-db-cache-update.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit man-db-cache-update.service has begun execution.
░░
░░ The job identifier is 2280.
Jan 18 11:30:34 managed-node2 systemd[1]: Reload requested from client PID 25314 ('systemctl') (unit session-5.scope)...
Jan 18 11:30:34 managed-node2 systemd[1]: Reloading...
Jan 18 11:30:34 managed-node2 systemd-rc-local-generator[25366]: /etc/rc.d/rc.local is not marked executable, skipping.
Jan 18 11:30:34 managed-node2 systemd-ssh-generator[25368]: Failed to query local AF_VSOCK CID: Permission denied
Jan 18 11:30:34 managed-node2 (sd-exec-[25339]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1.
Jan 18 11:30:34 managed-node2 systemd[1]: Reloading finished in 309 ms.
Jan 18 11:30:34 managed-node2 systemd[1]: Queuing reload/restart jobs for marked units…
Jan 18 11:30:35 managed-node2 systemd[1]: man-db-cache-update.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit man-db-cache-update.service has successfully entered the 'dead' state.
Jan 18 11:30:35 managed-node2 systemd[1]: Finished man-db-cache-update.service.
░░ Subject: A start job for unit man-db-cache-update.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit man-db-cache-update.service has finished successfully.
░░
░░ The job identifier is 2280.
Jan 18 11:30:35 managed-node2 systemd[1]: run-p25310-i25610.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit run-p25310-i25610.service has successfully entered the 'dead' state.
Jan 18 11:30:35 managed-node2 python3.12[25510]: ansible-file Invoked with name=/etc/certmonger//pre-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//pre-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:30:36 managed-node2 python3.12[25641]: ansible-file Invoked with name=/etc/certmonger//post-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//post-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:30:36 managed-node2 python3.12[25772]: ansible-ansible.legacy.systemd Invoked with name=certmonger state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Jan 18 11:30:36 managed-node2 systemd[1]: Reload requested from client PID 25775 ('systemctl') (unit session-5.scope)...
Jan 18 11:30:36 managed-node2 systemd[1]: Reloading...
Jan 18 11:30:36 managed-node2 systemd-ssh-generator[25823]: Failed to query local AF_VSOCK CID: Permission denied
Jan 18 11:30:36 managed-node2 systemd-rc-local-generator[25821]: /etc/rc.d/rc.local is not marked executable, skipping.
Jan 18 11:30:36 managed-node2 (sd-exec-[25796]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1.
Jan 18 11:30:37 managed-node2 systemd[1]: Reloading finished in 195 ms.
Jan 18 11:30:37 managed-node2 systemd[1]: Starting certmonger.service - Certificate monitoring and PKI enrollment...
░░ Subject: A start job for unit certmonger.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit certmonger.service has begun execution.
░░
░░ The job identifier is 2358.
Jan 18 11:30:37 managed-node2 (rtmonger)[25833]: certmonger.service: Referenced but unset environment variable evaluates to an empty string: OPTS
Jan 18 11:30:37 managed-node2 systemd[1]: Started certmonger.service - Certificate monitoring and PKI enrollment.
░░ Subject: A start job for unit certmonger.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit certmonger.service has finished successfully.
░░
░░ The job identifier is 2358.
Jan 18 11:30:37 managed-node2 python3.12[25991]: ansible-fedora.linux_system_roles.certificate_request Invoked with name=quadlet_demo dns=['localhost'] directory=/etc/pki/tls wait=True ca=self-sign __header=# # Ansible managed # # system_role:certificate provider_config_directory=/etc/certmonger provider=certmonger key_usage=['digitalSignature', 'keyEncipherment'] extended_key_usage=['id-kp-serverAuth', 'id-kp-clientAuth'] auto_renew=True ip=None email=None common_name=None country=None state=None locality=None organization=None organizational_unit=None contact_email=None key_size=None owner=None group=None mode=None principal=None run_before=None run_after=None
Jan 18 11:30:37 managed-node2 certmonger[25833]: 2025-01-18 11:30:37 [25833] Wrote to /var/lib/certmonger/requests/20250118163037
[... the identical certmonger 'Wrote to /var/lib/certmonger/requests/20250118163037' message repeats through 11:30:38 as the request advances ...]
Jan 18 11:30:38 managed-node2 certmonger[26006]: Certificate in file "/etc/pki/tls/certs/quadlet_demo.crt" issued by CA and saved.
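Editor's note: reading the certificate_request arguments back into role variables, the play presumably drove the certificate role with something like this sketch (name, dns, and ca values are confirmed by the Invoked line above; the wiring around them is assumed):

  - name: Generate certificates
    vars:
      certificate_requests:
        - name: quadlet_demo       # produces /etc/pki/tls/certs/quadlet_demo.crt and private/quadlet_demo.key
          dns: ['localhost']
          ca: self-sign            # certmonger's local self-signing CA
    ansible.builtin.include_role:
      name: fedora.linux_system_roles.certificate

The slurp/stop-tracking/absent steps that follow are the test reading the cert back and cleaning up after itself.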
Jan 18 11:30:38 managed-node2 certmonger[25833]: 2025-01-18 11:30:38 [25833] Wrote to /var/lib/certmonger/requests/20250118163037
Jan 18 11:30:38 managed-node2 python3.12[26137]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt
Jan 18 11:30:38 managed-node2 python3.12[26268]: ansible-slurp Invoked with path=/etc/pki/tls/private/quadlet_demo.key src=/etc/pki/tls/private/quadlet_demo.key
Jan 18 11:30:39 managed-node2 python3.12[26399]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt
Jan 18 11:30:39 managed-node2 python3.12[26530]: ansible-ansible.legacy.command Invoked with _raw_params=getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:30:39 managed-node2 certmonger[25833]: 2025-01-18 11:30:39 [25833] Wrote to /var/lib/certmonger/requests/20250118163037
Jan 18 11:30:40 managed-node2 python3.12[26662]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:30:40 managed-node2 python3.12[26793]: ansible-file Invoked with path=/etc/pki/tls/private/quadlet_demo.key state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:30:40 managed-node2 python3.12[26924]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:30:41 managed-node2 python3.12[27055]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 18 11:30:41 managed-node2 python3.12[27186]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 18 11:30:43 managed-node2 python3.12[27448]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:30:44 managed-node2 python3.12[27585]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None
Jan 18 11:30:44 managed-node2 python3.12[27717]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 18 11:30:46 managed-node2 python3.12[27850]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 18 11:30:46 managed-node2 python3.12[27981]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 18 11:30:47 managed-node2 python3.12[28112]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Jan 18 11:30:48 managed-node2 python3.12[28244]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None
Jan 18 11:30:48 managed-node2 python3.12[28377]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Jan 18 11:30:49 managed-node2 python3.12[28510]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None
Jan 18 11:30:50 managed-node2 python3.12[28641]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None
Jan 18 11:30:52 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jan 18 11:30:52 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. [identical 'Unit succeeded' catalog omitted]
Jan 18 11:30:53 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. [identical 'Unit succeeded' catalog omitted]
Jan 18 11:30:53 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. [identical 'Unit succeeded' catalog omitted]
Jan 18 11:30:53 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. [identical 'Unit succeeded' catalog omitted]
Jan 18 11:30:55 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. [identical 'Unit succeeded' catalog omitted]
Jan 18 11:30:55 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. [identical 'Unit succeeded' catalog omitted]
Jan 18 11:30:56 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. [identical 'Unit succeeded' catalog omitted]
Jan 18 11:30:56 managed-node2 python3.12[29252]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 18 11:30:58 managed-node2 python3.12[29385]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:30:58 managed-node2 python3.12[29516]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.network follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jan 18 11:30:59 managed-node2 python3.12[29621]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1737217858.56248-14288-74493159750705/.source.network dest=/etc/containers/systemd/quadlet-demo.network owner=root group=0 mode=0644 _original_basename=quadlet-demo.network follow=False checksum=e57c08d49aff4bae8daab138d913aeddaa8682a0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:30:59 managed-node2 python3.12[29752]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Jan 18 11:30:59 managed-node2 systemd[1]: Reload requested from client PID 29753 ('systemctl') (unit session-5.scope)...
Jan 18 11:30:59 managed-node2 systemd[1]: Reloading...
Jan 18 11:30:59 managed-node2 systemd-ssh-generator[29803]: Failed to query local AF_VSOCK CID: Permission denied
Jan 18 11:30:59 managed-node2 systemd-rc-local-generator[29801]: /etc/rc.d/rc.local is not marked executable, skipping.
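Editor's note: the firewall openings above (8000/tcp, 9000/tcp) and the quadlet file drops that begin here map onto the podman role's input variables. A condensed sketch of what the test presumably passed (file names and ports are taken from this log; the exact spec fields are assumptions, and the remaining files appear later in this section):

  - name: Deploy the quadlet demo app
    vars:
      podman_firewall:
        - port: 8000/tcp
          state: enabled
        - port: 9000/tcp
          state: enabled
      podman_quadlet_specs:
        - file_src: quadlet-demo.network
        - file_src: quadlet-demo-mysql.volume
        - file_src: quadlet-demo-mysql.container
        - file_src: envoy-proxy-configmap.yml
        - file_src: quadlet-demo.yml
        - file_src: quadlet-demo.kube
    ansible.builtin.include_role:
      name: fedora.linux_system_roles.podman

Each file lands in /etc/containers/systemd/, followed by a systemd daemon-reload so quadlet regenerates the corresponding services.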
Jan 18 11:30:59 managed-node2 (sd-exec-[29775]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1.
Jan 18 11:31:00 managed-node2 systemd[1]: Reloading finished in 196 ms.
Jan 18 11:31:00 managed-node2 python3.12[29940]: ansible-systemd Invoked with name=quadlet-demo-network.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None
Jan 18 11:31:00 managed-node2 systemd[1]: Starting quadlet-demo-network.service...
░░ Subject: A start job for unit quadlet-demo-network.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit quadlet-demo-network.service has begun execution.
░░
░░ The job identifier is 2437.
Jan 18 11:31:00 managed-node2 quadlet-demo-network[29944]: systemd-quadlet-demo
Jan 18 11:31:00 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. [identical 'Unit succeeded' catalog omitted]
Jan 18 11:31:00 managed-node2 systemd[1]: Finished quadlet-demo-network.service.
░░ Subject: A start job for unit quadlet-demo-network.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit quadlet-demo-network.service has finished successfully.
░░
░░ The job identifier is 2437.
Jan 18 11:31:01 managed-node2 python3.12[30084]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 18 11:31:03 managed-node2 python3.12[30217]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:31:03 managed-node2 python3.12[30348]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.volume follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jan 18 11:31:03 managed-node2 python3.12[30453]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1737217863.216999-14492-11702154848732/.source.volume dest=/etc/containers/systemd/quadlet-demo-mysql.volume owner=root group=0 mode=0644 _original_basename=quadlet-demo-mysql.volume follow=False checksum=585f8cbdf0ec73000f9227dcffbef71e9552ea4a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:31:04 managed-node2 python3.12[30584]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Jan 18 11:31:04 managed-node2 systemd[1]: Reload requested from client PID 30585 ('systemctl') (unit session-5.scope)...
Jan 18 11:31:04 managed-node2 systemd[1]: Reloading...
Jan 18 11:31:04 managed-node2 systemd-ssh-generator[30634]: Failed to query local AF_VSOCK CID: Permission denied
Jan 18 11:31:04 managed-node2 systemd-rc-local-generator[30632]: /etc/rc.d/rc.local is not marked executable, skipping.
Jan 18 11:31:04 managed-node2 (sd-exec-[30607]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1.
Jan 18 11:31:04 managed-node2 systemd[1]: Reloading finished in 204 ms.
Jan 18 11:31:05 managed-node2 python3.12[30772]: ansible-systemd Invoked with name=quadlet-demo-mysql-volume.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None
Jan 18 11:31:05 managed-node2 systemd[1]: Starting quadlet-demo-mysql-volume.service...
░░ Subject: A start job for unit quadlet-demo-mysql-volume.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit quadlet-demo-mysql-volume.service has begun execution.
░░
░░ The job identifier is 2521.
Jan 18 11:31:05 managed-node2 podman[30776]: 2025-01-18 11:31:05.315874535 -0500 EST m=+0.024652443 volume create systemd-quadlet-demo-mysql
Jan 18 11:31:05 managed-node2 quadlet-demo-mysql-volume[30776]: systemd-quadlet-demo-mysql
Jan 18 11:31:05 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. [identical 'Unit succeeded' catalog omitted]
Jan 18 11:31:05 managed-node2 systemd[1]: Finished quadlet-demo-mysql-volume.service.
░░ Subject: A start job for unit quadlet-demo-mysql-volume.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit quadlet-demo-mysql-volume.service has finished successfully.
░░
░░ The job identifier is 2521.
Jan 18 11:31:06 managed-node2 python3.12[30914]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 18 11:31:07 managed-node2 python3.12[31047]: ansible-file Invoked with path=/tmp/quadlet_demo state=directory owner=root group=root mode=0777 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:31:08 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. [identical 'Unit succeeded' catalog omitted]
Jan 18 11:31:17 managed-node2 podman[31187]: 2025-01-18 11:31:17.730931599 -0500 EST m=+9.639813451 image pull dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6
Jan 18 11:31:17 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. [identical 'Unit succeeded' catalog omitted]
Jan 18 11:31:17 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. [identical 'Unit succeeded' catalog omitted]
Jan 18 11:31:18 managed-node2 python3.12[31495]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:31:18 managed-node2 python3.12[31626]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jan 18 11:31:19 managed-node2 python3.12[31731]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/systemd/quadlet-demo-mysql.container owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1737217878.4555283-14896-65681884352884/.source.container _original_basename=.g6l7hkjm follow=False checksum=ca62b2ad3cc9afb5b5371ebbf797b9bc4fd7edd4 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:31:19 managed-node2 python3.12[31862]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Jan 18 11:31:19 managed-node2 systemd[1]: Reload requested from client PID 31863 ('systemctl') (unit session-5.scope)...
Jan 18 11:31:19 managed-node2 systemd[1]: Reloading...
Jan 18 11:31:19 managed-node2 systemd-ssh-generator[31906]: Failed to query local AF_VSOCK CID: Permission denied
Jan 18 11:31:19 managed-node2 systemd-rc-local-generator[31904]: /etc/rc.d/rc.local is not marked executable, skipping.
Jan 18 11:31:19 managed-node2 (sd-exec-[31885]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1.
Jan 18 11:31:19 managed-node2 systemd[1]: Reloading finished in 208 ms.
Jan 18 11:31:20 managed-node2 python3.12[32050]: ansible-systemd Invoked with name=quadlet-demo-mysql.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None
Jan 18 11:31:20 managed-node2 systemd[1]: Starting quadlet-demo-mysql.service...
░░ Subject: A start job for unit quadlet-demo-mysql.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit quadlet-demo-mysql.service has begun execution.
░░
░░ The job identifier is 2605.
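Editor's note: quadlet-demo-mysql.container is not reproduced in the log, but the image, container name, network (systemd-quadlet-demo), volume (systemd-quadlet-demo-mysql), and the healthcheck timer seen below constrain what it must contain. A minimal sketch; every key not confirmed by the log is an assumption:

  # quadlet-demo-mysql.container (installed to /etc/containers/systemd/)
  [Container]
  Image=quay.io/linux-system-roles/mysql:5.6
  ContainerName=quadlet-demo-mysql
  Network=quadlet-demo.network                       # joins systemd-quadlet-demo, created above
  Volume=quadlet-demo-mysql.volume:/var/lib/mysql    # backed by systemd-quadlet-demo-mysql; mount point assumed
  HealthCmd=/bin/true                                # hypothetical; some health check must exist, given the healthcheck timer below

  [Install]
  WantedBy=multi-user.target

Referencing the .network and .volume quadlet files by name is standard quadlet syntax; quadlet resolves them to the generated podman objects.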
Jan 18 11:31:20 managed-node2 podman[32054]: 2025-01-18 11:31:20.496650499 -0500 EST m=+0.049779030 container create c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service)
Jan 18 11:31:20 managed-node2 kernel: podman2: port 1(veth0) entered blocking state
Jan 18 11:31:20 managed-node2 kernel: podman2: port 1(veth0) entered disabled state
Jan 18 11:31:20 managed-node2 kernel: veth0: entered allmulticast mode
Jan 18 11:31:20 managed-node2 kernel: veth0: entered promiscuous mode
Jan 18 11:31:20 managed-node2 kernel: podman2: port 1(veth0) entered blocking state
Jan 18 11:31:20 managed-node2 kernel: podman2: port 1(veth0) entered forwarding state
Jan 18 11:31:20 managed-node2 NetworkManager[709]: [1737217880.5264] manager: (podman2): new Bridge device (/org/freedesktop/NetworkManager/Devices/3)
Jan 18 11:31:20 managed-node2 NetworkManager[709]: [1737217880.5289] device (veth0): carrier: link connected
Jan 18 11:31:20 managed-node2 NetworkManager[709]: [1737217880.5292] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/4)
Jan 18 11:31:20 managed-node2 NetworkManager[709]: [1737217880.5296] device (podman2): carrier: link connected
Jan 18 11:31:20 managed-node2 (udev-worker)[32069]: Network interface NamePolicy= disabled on kernel command line.
Jan 18 11:31:20 managed-node2 (udev-worker)[32067]: Network interface NamePolicy= disabled on kernel command line.
Jan 18 11:31:20 managed-node2 NetworkManager[709]: [1737217880.5505] device (podman2): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Jan 18 11:31:20 managed-node2 NetworkManager[709]: [1737217880.5529] device (podman2): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external')
Jan 18 11:31:20 managed-node2 NetworkManager[709]: [1737217880.5548] device (podman2): Activation: starting connection 'podman2' (1074efbb-9529-43ba-921b-6e9ef70edf68)
Jan 18 11:31:20 managed-node2 NetworkManager[709]: [1737217880.5557] device (podman2): state change: disconnected -> prepare (reason 'none', managed-type: 'external')
Jan 18 11:31:20 managed-node2 NetworkManager[709]: [1737217880.5565] device (podman2): state change: prepare -> config (reason 'none', managed-type: 'external')
Jan 18 11:31:20 managed-node2 NetworkManager[709]: [1737217880.5573] device (podman2): state change: config -> ip-config (reason 'none', managed-type: 'external')
Jan 18 11:31:20 managed-node2 NetworkManager[709]: [1737217880.5582] device (podman2): state change: ip-config -> ip-check (reason 'none', managed-type: 'external')
Jan 18 11:31:20 managed-node2 podman[32054]: 2025-01-18 11:31:20.477039879 -0500 EST m=+0.030168563 image pull dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6
Jan 18 11:31:20 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service...
░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit NetworkManager-dispatcher.service has begun execution.
░░
░░ The job identifier is 2691.
Jan 18 11:31:20 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service.
░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit NetworkManager-dispatcher.service has finished successfully.
░░
░░ The job identifier is 2691.
Jan 18 11:31:20 managed-node2 NetworkManager[709]: [1737217880.5970] device (podman2): state change: ip-check -> secondaries (reason 'none', managed-type: 'external')
Jan 18 11:31:20 managed-node2 NetworkManager[709]: [1737217880.5972] device (podman2): state change: secondaries -> activated (reason 'none', managed-type: 'external')
Jan 18 11:31:20 managed-node2 NetworkManager[709]: [1737217880.5977] device (podman2): Activation: successful, device activated.
Jan 18 11:31:20 managed-node2 systemd[1]: Started run-p32102-i32402.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run.
░░ Subject: A start job for unit run-p32102-i32402.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit run-p32102-i32402.scope has finished successfully.
░░
░░ The job identifier is 2770.
Jan 18 11:31:20 managed-node2 systemd[1]: Started c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac-21cb1526229c8aa.timer - [systemd-run] /usr/bin/podman healthcheck run c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac.
░░ Subject: A start job for unit c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac-21cb1526229c8aa.timer has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac-21cb1526229c8aa.timer has finished successfully.
░░
░░ The job identifier is 2776.
Jan 18 11:31:20 managed-node2 podman[32054]: 2025-01-18 11:31:20.714063203 -0500 EST m=+0.267191817 container init c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service)
Jan 18 11:31:20 managed-node2 systemd[1]: Started quadlet-demo-mysql.service.
░░ Subject: A start job for unit quadlet-demo-mysql.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit quadlet-demo-mysql.service has finished successfully.
░░
░░ The job identifier is 2605.
Jan 18 11:31:20 managed-node2 podman[32054]: 2025-01-18 11:31:20.744374337 -0500 EST m=+0.297503364 container start c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service)
Jan 18 11:31:20 managed-node2 quadlet-demo-mysql[32054]: c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac
Jan 18 11:31:20 managed-node2 podman[32114]: 2025-01-18 11:31:20.798229509 -0500 EST m=+0.058335837 container died c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service)
Jan 18 11:31:20 managed-node2 systemd[1]: c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac-21cb1526229c8aa.timer: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac-21cb1526229c8aa.timer has successfully entered the 'dead' state.
Jan 18 11:31:20 managed-node2 systemd[1]: Stopped c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac-21cb1526229c8aa.timer - [systemd-run] /usr/bin/podman healthcheck run c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac.
░░ Subject: A stop job for unit c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac-21cb1526229c8aa.timer has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac-21cb1526229c8aa.timer has finished.
░░
░░ The job identifier is 2932 and the job result is done.
Jan 18 11:31:20 managed-node2 systemd[1]: c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac-21cb1526229c8aa.service: Main process exited, code=exited, status=1/FAILURE
░░ Subject: Unit process exited
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ An ExecStart= process belonging to unit c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac-21cb1526229c8aa.service has exited.
░░
░░ The process' exit code is 'exited' and its exit status is 1.
Jan 18 11:31:20 managed-node2 systemd[1]: c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac-21cb1526229c8aa.service: Failed with result 'exit-code'.
░░ Subject: Unit failed
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac-21cb1526229c8aa.service has entered the 'failed' state with result 'exit-code'.
Jan 18 11:31:20 managed-node2 kernel: podman2: port 1(veth0) entered disabled state
Jan 18 11:31:20 managed-node2 systemd[1]: run-p32102-i32402.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit run-p32102-i32402.scope has successfully entered the 'dead' state.
Jan 18 11:31:20 managed-node2 kernel: veth0 (unregistering): left allmulticast mode
Jan 18 11:31:20 managed-node2 kernel: veth0 (unregistering): left promiscuous mode
Jan 18 11:31:20 managed-node2 kernel: podman2: port 1(veth0) entered disabled state
Jan 18 11:31:20 managed-node2 NetworkManager[709]: [1737217880.8571] device (podman2): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Jan 18 11:31:20 managed-node2 systemd[1]: run-netns-netns\x2dd7ac6bec\x2d1d2f\x2d2dc3\x2d6ae5\x2d3ad68d6e13c0.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit run-netns-netns\x2dd7ac6bec\x2d1d2f\x2d2dc3\x2d6ae5\x2d3ad68d6e13c0.mount has successfully entered the 'dead' state.
Jan 18 11:31:20 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac-userdata-shm.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay\x2dcontainers-c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac-userdata-shm.mount has successfully entered the 'dead' state.
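Editor's note: the mysql container started and then died within roughly 50 ms, taking quadlet-demo-mysql.service down with it. Since podman removes the container on cleanup (see the 'container remove' event just below), the container's own stderr is the thing to capture. A generic triage sketch with stock commands, not output from this run:

  systemctl status quadlet-demo-mysql.service
  journalctl -u quadlet-demo-mysql.service -e        # the container's stdout/stderr lands here via conmon
  /usr/libexec/podman/quadlet -dryrun                # re-generate and validate the units quadlet derived from /etc/containers/systemd/

A container that exits this quickly usually points at an entrypoint-level failure (for a mysql image, commonly a missing root password variable or an unusable data directory) rather than a systemd or quadlet problem.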
Jan 18 11:31:20 managed-node2 podman[32114]: 2025-01-18 11:31:20.934960057 -0500 EST m=+0.195066423 container remove c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service)
Jan 18 11:31:20 managed-node2 systemd[1]: quadlet-demo-mysql.service: Main process exited, code=exited, status=1/FAILURE
░░ Subject: Unit process exited
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ An ExecStart= process belonging to unit quadlet-demo-mysql.service has exited.
░░
░░ The process' exit code is 'exited' and its exit status is 1.
Jan 18 11:31:20 managed-node2 systemd[1]: quadlet-demo-mysql.service: Failed with result 'exit-code'.
░░ Subject: Unit failed
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit quadlet-demo-mysql.service has entered the 'failed' state with result 'exit-code'.
Jan 18 11:31:21 managed-node2 systemd[1]: var-lib-containers-storage-overlay-f44b8f9f5782cefbf6ffe68a333b8178d190e6631786d7d41cf9b947cad743d5-merged.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay-f44b8f9f5782cefbf6ffe68a333b8178d190e6631786d7d41cf9b947cad743d5-merged.mount has successfully entered the 'dead' state.
Jan 18 11:31:21 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. [identical 'Unit succeeded' catalog omitted]
Jan 18 11:31:21 managed-node2 python3.12[32286]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 18 11:31:22 managed-node2 python3.12[32419]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:31:23 managed-node2 python3.12[32550]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/envoy-proxy-configmap.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jan 18 11:31:23 managed-node2 python3.12[32655]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1737217883.142967-15089-251893677799058/.source.yml dest=/etc/containers/systemd/envoy-proxy-configmap.yml owner=root group=0 mode=0644 _original_basename=envoy-proxy-configmap.yml follow=False checksum=d681c7d56f912150d041873e880818b22a90c188 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:31:24 managed-node2 python3.12[32786]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Jan 18 11:31:24 managed-node2 systemd[1]: Reload requested from client PID 32787 ('systemctl') (unit session-5.scope)...
Jan 18 11:31:24 managed-node2 systemd[1]: Reloading...
Jan 18 11:31:24 managed-node2 systemd-rc-local-generator[32836]: /etc/rc.d/rc.local is not marked executable, skipping.
Jan 18 11:31:24 managed-node2 systemd-ssh-generator[32839]: Failed to query local AF_VSOCK CID: Permission denied
Jan 18 11:31:24 managed-node2 (sd-exec-[32810]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1.
Jan 18 11:31:24 managed-node2 systemd[1]: Reloading finished in 205 ms.
Jan 18 11:31:25 managed-node2 python3.12[32975]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 18 11:31:26 managed-node2 python3.12[33108]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:31:27 managed-node2 python3.12[33239]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jan 18 11:31:27 managed-node2 python3.12[33344]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/systemd/quadlet-demo.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1737217887.039937-15252-234748873817093/.source.yml _original_basename=.9j201qqb follow=False checksum=998dccde0483b1654327a46ddd89cbaa47650370 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:31:28 managed-node2 python3.12[33475]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Jan 18 11:31:28 managed-node2 systemd[1]: Reload requested from client PID 33476 ('systemctl') (unit session-5.scope)...
Jan 18 11:31:28 managed-node2 systemd[1]: Reloading...
Jan 18 11:31:28 managed-node2 systemd-ssh-generator[33525]: Failed to query local AF_VSOCK CID: Permission denied
Jan 18 11:31:28 managed-node2 systemd-rc-local-generator[33523]: /etc/rc.d/rc.local is not marked executable, skipping.
Jan 18 11:31:28 managed-node2 (sd-exec-[33499]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1.
Jan 18 11:31:28 managed-node2 systemd[1]: Reloading finished in 210 ms.
Jan 18 11:31:29 managed-node2 python3.12[33665]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 18 11:31:30 managed-node2 python3.12[33798]: ansible-slurp Invoked with path=/etc/containers/systemd/quadlet-demo.yml src=/etc/containers/systemd/quadlet-demo.yml
Jan 18 11:31:30 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.
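Editor's note: envoy-proxy-configmap.yml and quadlet-demo.yml are Kubernetes YAML files that the .kube unit installed later will feed to podman kube play; their contents are not reproduced in this log. Structurally, the ConfigMap presumably follows this skeleton (the metadata name and data key are placeholders, not values from the test):

  apiVersion: v1
  kind: ConfigMap
  metadata:
    name: envoy-proxy-config       # hypothetical name
  data:
    envoy.yaml: |
      # Envoy listener/cluster configuration would go here (not visible in this log)

quadlet-demo.yml would analogously carry the Pod (wordpress plus envoy containers, per the images pulled below) and any PersistentVolumeClaims it mounts.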
Jan 18 11:31:31 managed-node2 python3.12[33930]: ansible-file Invoked with path=/tmp/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:31:31 managed-node2 python3.12[34061]: ansible-file Invoked with path=/tmp/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:31:31 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. [identical 'Unit succeeded' catalog omitted]
Jan 18 11:31:45 managed-node2 podman[34200]: 2025-01-18 11:31:45.223512241 -0500 EST m=+13.268197810 image pull fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b quay.io/linux-system-roles/wordpress:4.8-apache
Jan 18 11:31:45 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. [identical 'Unit succeeded' catalog omitted]
Jan 18 11:31:45 managed-node2 systemd[4347]: Created slice background.slice - User Background Tasks Slice.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 14.
Jan 18 11:31:45 managed-node2 systemd[4347]: Starting systemd-tmpfiles-clean.service - Cleanup of User's Temporary Files and Directories...
░░ Subject: A start job for unit UNIT has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has begun execution.
░░
░░ The job identifier is 13.
Jan 18 11:31:45 managed-node2 systemd[4347]: Finished systemd-tmpfiles-clean.service - Cleanup of User's Temporary Files and Directories.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 13.
Jan 18 11:31:45 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. [identical 'Unit succeeded' catalog omitted]
Jan 18 11:31:45 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. [identical 'Unit succeeded' catalog omitted]
Jan 18 11:31:49 managed-node2 podman[34625]: 2025-01-18 11:31:49.789273837 -0500 EST m=+4.055715037 image pull 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d quay.io/linux-system-roles/envoyproxy:v1.25.0
Jan 18 11:31:49 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. [identical 'Unit succeeded' catalog omitted]
Jan 18 11:31:49 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. [identical 'Unit succeeded' catalog omitted]
Jan 18 11:31:50 managed-node2 python3.12[34888]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:31:50 managed-node2 python3.12[35019]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.kube follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jan 18 11:31:50 managed-node2 python3.12[35124]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1737217910.379617-15884-151214228727689/.source.kube dest=/etc/containers/systemd/quadlet-demo.kube owner=root group=0 mode=0644 _original_basename=quadlet-demo.kube follow=False checksum=7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:31:51 managed-node2 python3.12[35255]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Jan 18 11:31:51 managed-node2 systemd[1]: Reload requested from client PID 35256 ('systemctl') (unit session-5.scope)...
Jan 18 11:31:51 managed-node2 systemd[1]: Reloading...
Jan 18 11:31:51 managed-node2 systemd-rc-local-generator[35302]: /etc/rc.d/rc.local is not marked executable, skipping.
Jan 18 11:31:51 managed-node2 systemd-ssh-generator[35304]: Failed to query local AF_VSOCK CID: Permission denied
Jan 18 11:31:51 managed-node2 (sd-exec-[35278]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1.
Jan 18 11:31:51 managed-node2 systemd[1]: Reloading finished in 208 ms.
Jan 18 11:31:52 managed-node2 python3.12[35443]: ansible-systemd Invoked with name=quadlet-demo.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None
Jan 18 11:31:52 managed-node2 systemd[1]: Starting quadlet-demo-mysql.service...
░░ Subject: A start job for unit quadlet-demo-mysql.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit quadlet-demo-mysql.service has begun execution.
░░
░░ The job identifier is 2935.
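Editor's note: starting quadlet-demo.service immediately re-queues quadlet-demo-mysql.service, which is what a dependency in the .kube unit would do. A sketch of quadlet-demo.kube; only the file name and the observed mysql dependency are confirmed by the log, everything else is an assumption using standard [Kube] keys:

  # quadlet-demo.kube (installed to /etc/containers/systemd/)
  [Unit]
  Requires=quadlet-demo-mysql.service    # would explain the restart of the mysql unit seen here
  After=quadlet-demo-mysql.service

  [Kube]
  Yaml=quadlet-demo.yml
  ConfigMap=envoy-proxy-configmap.yml
  Network=quadlet-demo.network
  PublishPort=8000:8080                  # hypothetical mapping; 8000/tcp and 9000/tcp were opened earlier
  PublishPort=9000:9901                  # hypothetical mapping

  [Install]
  WantedBy=multi-user.target

Quadlet translates this into a systemd service that runs podman kube play against quadlet-demo.yml with the named ConfigMap and network.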
Jan 18 11:31:52 managed-node2 podman[35447]: 2025-01-18 11:31:52.454404162 -0500 EST m=+0.049698892 container create c9e0c0b61f5bf1f4e67dfcaa03feb6e7ede00a21a6836db8f69fee04a7766bec (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 18 11:31:52 managed-node2 NetworkManager[709]: [1737217912.4781] manager: (podman2): new Bridge device (/org/freedesktop/NetworkManager/Devices/5) Jan 18 11:31:52 managed-node2 kernel: podman2: port 1(veth0) entered blocking state Jan 18 11:31:52 managed-node2 kernel: podman2: port 1(veth0) entered disabled state Jan 18 11:31:52 managed-node2 kernel: veth0: entered allmulticast mode Jan 18 11:31:52 managed-node2 kernel: veth0: entered promiscuous mode Jan 18 11:31:52 managed-node2 kernel: podman2: port 1(veth0) entered blocking state Jan 18 11:31:52 managed-node2 kernel: podman2: port 1(veth0) entered forwarding state Jan 18 11:31:52 managed-node2 kernel: podman2: port 1(veth0) entered disabled state Jan 18 11:31:52 managed-node2 kernel: podman2: port 1(veth0) entered blocking state Jan 18 11:31:52 managed-node2 kernel: podman2: port 1(veth0) entered forwarding state Jan 18 11:31:52 managed-node2 NetworkManager[709]: [1737217912.4872] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/6) Jan 18 11:31:52 managed-node2 (udev-worker)[35457]: Network interface NamePolicy= disabled on kernel command line. Jan 18 11:31:52 managed-node2 (udev-worker)[35458]: Network interface NamePolicy= disabled on kernel command line. Jan 18 11:31:52 managed-node2 NetworkManager[709]: [1737217912.4933] device (veth0): carrier: link connected Jan 18 11:31:52 managed-node2 NetworkManager[709]: [1737217912.4942] device (podman2): carrier: link connected Jan 18 11:31:52 managed-node2 NetworkManager[709]: [1737217912.5102] device (podman2): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Jan 18 11:31:52 managed-node2 NetworkManager[709]: [1737217912.5118] device (podman2): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Jan 18 11:31:52 managed-node2 NetworkManager[709]: [1737217912.5134] device (podman2): Activation: starting connection 'podman2' (f4ffa46c-5a51-42dc-9f79-499ce208f1df) Jan 18 11:31:52 managed-node2 NetworkManager[709]: [1737217912.5137] device (podman2): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Jan 18 11:31:52 managed-node2 NetworkManager[709]: [1737217912.5140] device (podman2): state change: prepare -> config (reason 'none', managed-type: 'external') Jan 18 11:31:52 managed-node2 NetworkManager[709]: [1737217912.5142] device (podman2): state change: config -> ip-config (reason 'none', managed-type: 'external') Jan 18 11:31:52 managed-node2 NetworkManager[709]: [1737217912.5145] device (podman2): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Jan 18 11:31:52 managed-node2 podman[35447]: 2025-01-18 11:31:52.434260103 -0500 EST m=+0.029554934 image pull dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6 Jan 18 11:31:52 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. 
░░ ░░ The job identifier is 3021. Jan 18 11:31:52 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 3021. Jan 18 11:31:52 managed-node2 NetworkManager[709]: [1737217912.5626] device (podman2): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Jan 18 11:31:52 managed-node2 NetworkManager[709]: [1737217912.5629] device (podman2): state change: secondaries -> activated (reason 'none', managed-type: 'external') Jan 18 11:31:52 managed-node2 NetworkManager[709]: [1737217912.5636] device (podman2): Activation: successful, device activated. Jan 18 11:31:52 managed-node2 systemd[1]: Started run-p35491-i35791.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit run-p35491-i35791.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-p35491-i35791.scope has finished successfully. ░░ ░░ The job identifier is 3100. Jan 18 11:31:52 managed-node2 systemd[1]: Started c9e0c0b61f5bf1f4e67dfcaa03feb6e7ede00a21a6836db8f69fee04a7766bec-68378f64dc5bc1c8.timer - [systemd-run] /usr/bin/podman healthcheck run c9e0c0b61f5bf1f4e67dfcaa03feb6e7ede00a21a6836db8f69fee04a7766bec. ░░ Subject: A start job for unit c9e0c0b61f5bf1f4e67dfcaa03feb6e7ede00a21a6836db8f69fee04a7766bec-68378f64dc5bc1c8.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit c9e0c0b61f5bf1f4e67dfcaa03feb6e7ede00a21a6836db8f69fee04a7766bec-68378f64dc5bc1c8.timer has finished successfully. ░░ ░░ The job identifier is 3106. Jan 18 11:31:52 managed-node2 podman[35447]: 2025-01-18 11:31:52.654249735 -0500 EST m=+0.249544531 container init c9e0c0b61f5bf1f4e67dfcaa03feb6e7ede00a21a6836db8f69fee04a7766bec (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 18 11:31:52 managed-node2 systemd[1]: Started quadlet-demo-mysql.service. ░░ Subject: A start job for unit quadlet-demo-mysql.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-mysql.service has finished successfully. ░░ ░░ The job identifier is 2935. Jan 18 11:31:52 managed-node2 systemd[1]: Starting quadlet-demo.service... ░░ Subject: A start job for unit quadlet-demo.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo.service has begun execution. ░░ ░░ The job identifier is 2934. 
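The transient ...-68378f64dc5bc1c8.timer unit started above is how podman schedules container health checks under systemd: each tick runs podman healthcheck run against the container ID. The same check can be triggered manually while the container is up; a sketch, using the container name from the log:

    # run the container's configured healthcheck once, by hand
    podman healthcheck run quadlet-demo-mysql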
Jan 18 11:31:52 managed-node2 podman[35447]: 2025-01-18 11:31:52.687513724 -0500 EST m=+0.282808666 container start c9e0c0b61f5bf1f4e67dfcaa03feb6e7ede00a21a6836db8f69fee04a7766bec (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 18 11:31:52 managed-node2 quadlet-demo-mysql[35447]: c9e0c0b61f5bf1f4e67dfcaa03feb6e7ede00a21a6836db8f69fee04a7766bec Jan 18 11:31:52 managed-node2 podman[35508]: 2025-01-18 11:31:52.721738515 -0500 EST m=+0.050046531 container died c9e0c0b61f5bf1f4e67dfcaa03feb6e7ede00a21a6836db8f69fee04a7766bec (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 18 11:31:52 managed-node2 quadlet-demo[35509]: Pods stopped: Jan 18 11:31:52 managed-node2 quadlet-demo[35509]: Pods removed: Jan 18 11:31:52 managed-node2 quadlet-demo[35509]: Secrets removed: Jan 18 11:31:52 managed-node2 quadlet-demo[35509]: Volumes removed: Jan 18 11:31:52 managed-node2 systemd[1]: c9e0c0b61f5bf1f4e67dfcaa03feb6e7ede00a21a6836db8f69fee04a7766bec-68378f64dc5bc1c8.timer: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit c9e0c0b61f5bf1f4e67dfcaa03feb6e7ede00a21a6836db8f69fee04a7766bec-68378f64dc5bc1c8.timer has successfully entered the 'dead' state. Jan 18 11:31:52 managed-node2 systemd[1]: Stopped c9e0c0b61f5bf1f4e67dfcaa03feb6e7ede00a21a6836db8f69fee04a7766bec-68378f64dc5bc1c8.timer - [systemd-run] /usr/bin/podman healthcheck run c9e0c0b61f5bf1f4e67dfcaa03feb6e7ede00a21a6836db8f69fee04a7766bec. ░░ Subject: A stop job for unit c9e0c0b61f5bf1f4e67dfcaa03feb6e7ede00a21a6836db8f69fee04a7766bec-68378f64dc5bc1c8.timer has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit c9e0c0b61f5bf1f4e67dfcaa03feb6e7ede00a21a6836db8f69fee04a7766bec-68378f64dc5bc1c8.timer has finished. ░░ ░░ The job identifier is 3262 and the job result is done. Jan 18 11:31:52 managed-node2 podman[35509]: 2025-01-18 11:31:52.762492215 -0500 EST m=+0.066794290 volume create wp-pv-claim Jan 18 11:31:52 managed-node2 systemd[1]: run-p35491-i35791.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-p35491-i35791.scope has successfully entered the 'dead' state. Jan 18 11:31:52 managed-node2 kernel: podman2: port 1(veth0) entered disabled state Jan 18 11:31:52 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Jan 18 11:31:52 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Jan 18 11:31:52 managed-node2 kernel: podman2: port 1(veth0) entered disabled state Jan 18 11:31:52 managed-node2 NetworkManager[709]: [1737217912.7929] device (podman2): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Jan 18 11:31:52 managed-node2 podman[35509]: 2025-01-18 11:31:52.796853397 -0500 EST m=+0.101155878 container create ebaa7123818d7b22f46a8d9a19742b43fccbb493eeaf6330c7a2021249095307 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 18 11:31:52 managed-node2 systemd[1]: run-netns-netns\x2de8f3d60e\x2d94aa\x2de59c\x2d25d5\x2d7ab49622e9c0.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2de8f3d60e\x2d94aa\x2de59c\x2d25d5\x2d7ab49622e9c0.mount has successfully entered the 'dead' state. Jan 18 11:31:52 managed-node2 podman[35509]: 2025-01-18 11:31:52.884198004 -0500 EST m=+0.188500074 volume create envoy-proxy-config Jan 18 11:31:52 managed-node2 podman[35509]: 2025-01-18 11:31:52.889315404 -0500 EST m=+0.193617500 volume create envoy-certificates Jan 18 11:31:52 managed-node2 systemd[1]: Created slice machine-libpod_pod_a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718.slice - cgroup machine-libpod_pod_a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718.slice. ░░ Subject: A start job for unit machine-libpod_pod_a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718.slice has finished successfully. ░░ ░░ The job identifier is 3264. Jan 18 11:31:52 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-c9e0c0b61f5bf1f4e67dfcaa03feb6e7ede00a21a6836db8f69fee04a7766bec-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-c9e0c0b61f5bf1f4e67dfcaa03feb6e7ede00a21a6836db8f69fee04a7766bec-userdata-shm.mount has successfully entered the 'dead' state. Jan 18 11:31:52 managed-node2 podman[35508]: 2025-01-18 11:31:52.946202817 -0500 EST m=+0.274510864 container remove c9e0c0b61f5bf1f4e67dfcaa03feb6e7ede00a21a6836db8f69fee04a7766bec (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 18 11:31:52 managed-node2 systemd[1]: quadlet-demo-mysql.service: Main process exited, code=exited, status=1/FAILURE ░░ Subject: Unit process exited ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ An ExecStart= process belonging to unit quadlet-demo-mysql.service has exited. ░░ ░░ The process' exit code is 'exited' and its exit status is 1. Jan 18 11:31:52 managed-node2 podman[35509]: 2025-01-18 11:31:52.968071448 -0500 EST m=+0.272373929 container create 31a0fad686b3f8a96df9bec1ce38218427a7b55b3827fbc3f11a6c6256bbaeb3 (image=localhost/podman-pause:5.3.1-1733097600, name=a217dea02776-infra, pod_id=a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Jan 18 11:31:52 managed-node2 podman[35509]: 2025-01-18 11:31:52.973088105 -0500 EST m=+0.277390350 pod create a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718 (image=, name=quadlet-demo) Jan 18 11:31:53 managed-node2 systemd[1]: quadlet-demo-mysql.service: Failed with result 'exit-code'. ░░ Subject: Unit failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit quadlet-demo-mysql.service has entered the 'failed' state with result 'exit-code'. 
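At this point quadlet-demo-mysql.service has gone from start to failed in under a second: the container died immediately after init, the main podman process exited with status 1, and the container was removed. Hypothetical triage commands for a failure like this (the container itself is already gone, so its output has to come from the journal):

    # summarize the unit's last run, then read its journal entries
    systemctl status quadlet-demo-mysql.service
    journalctl -xeu quadlet-demo-mysql.service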
Jan 18 11:31:53 managed-node2 podman[35509]: 2025-01-18 11:31:53.023105281 -0500 EST m=+0.327407444 container create 4b516baa98043afa93c6a79d4758790753b9566bfe5ea033225e996306507a8e (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 18 11:31:53 managed-node2 podman[35509]: 2025-01-18 11:31:52.983960024 -0500 EST m=+0.288262529 image pull fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b quay.io/linux-system-roles/wordpress:4.8-apache Jan 18 11:31:53 managed-node2 podman[35509]: 2025-01-18 11:31:53.027034276 -0500 EST m=+0.331336488 image pull 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d quay.io/linux-system-roles/envoyproxy:v1.25.0 Jan 18 11:31:53 managed-node2 podman[35509]: 2025-01-18 11:31:53.051903944 -0500 EST m=+0.356206095 container create 54ac665dc4b5db861fdaa07c016ad0171534108c182850b042590fb55742b41b (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 18 11:31:53 managed-node2 podman[35509]: 2025-01-18 11:31:53.052331434 -0500 EST m=+0.356633525 container restart ebaa7123818d7b22f46a8d9a19742b43fccbb493eeaf6330c7a2021249095307 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 18 11:31:53 managed-node2 systemd[1]: Started libpod-ebaa7123818d7b22f46a8d9a19742b43fccbb493eeaf6330c7a2021249095307.scope - libcrun container. ░░ Subject: A start job for unit libpod-ebaa7123818d7b22f46a8d9a19742b43fccbb493eeaf6330c7a2021249095307.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-ebaa7123818d7b22f46a8d9a19742b43fccbb493eeaf6330c7a2021249095307.scope has finished successfully. ░░ ░░ The job identifier is 3270. Jan 18 11:31:53 managed-node2 podman[35509]: 2025-01-18 11:31:53.17129751 -0500 EST m=+0.475599758 container init ebaa7123818d7b22f46a8d9a19742b43fccbb493eeaf6330c7a2021249095307 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 18 11:31:53 managed-node2 podman[35509]: 2025-01-18 11:31:53.174529033 -0500 EST m=+0.478831206 container start ebaa7123818d7b22f46a8d9a19742b43fccbb493eeaf6330c7a2021249095307 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 18 11:31:53 managed-node2 systemd[1]: libpod-ebaa7123818d7b22f46a8d9a19742b43fccbb493eeaf6330c7a2021249095307.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-ebaa7123818d7b22f46a8d9a19742b43fccbb493eeaf6330c7a2021249095307.scope has successfully entered the 'dead' state. 
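The create records above assemble the quadlet-demo pod: an infra container (a217dea02776-infra) plus the wordpress and envoy workload containers, all sharing the same pod_id. A sketch for viewing that grouping while the pod exists; the --pod flag adds a pod column to the listing:

    # show all containers with their pod membership
    podman ps -a --pod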
Jan 18 11:31:53 managed-node2 kernel: podman2: port 1(veth0) entered blocking state Jan 18 11:31:53 managed-node2 kernel: podman2: port 1(veth0) entered disabled state Jan 18 11:31:53 managed-node2 kernel: veth0: entered allmulticast mode Jan 18 11:31:53 managed-node2 kernel: veth0: entered promiscuous mode Jan 18 11:31:53 managed-node2 kernel: podman2: port 1(veth0) entered blocking state Jan 18 11:31:53 managed-node2 kernel: podman2: port 1(veth0) entered forwarding state Jan 18 11:31:53 managed-node2 NetworkManager[709]: [1737217913.1975] manager: (podman2): new Bridge device (/org/freedesktop/NetworkManager/Devices/7) Jan 18 11:31:53 managed-node2 kernel: podman2: port 1(veth0) entered disabled state Jan 18 11:31:53 managed-node2 (udev-worker)[35479]: Network interface NamePolicy= disabled on kernel command line. Jan 18 11:31:53 managed-node2 kernel: podman2: port 1(veth0) entered blocking state Jan 18 11:31:53 managed-node2 kernel: podman2: port 1(veth0) entered forwarding state Jan 18 11:31:53 managed-node2 NetworkManager[709]: [1737217913.2045] device (podman2): carrier: link connected Jan 18 11:31:53 managed-node2 NetworkManager[709]: [1737217913.2064] device (veth0): carrier: link connected Jan 18 11:31:53 managed-node2 NetworkManager[709]: [1737217913.2083] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/8) Jan 18 11:31:53 managed-node2 NetworkManager[709]: [1737217913.2519] device (podman2): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Jan 18 11:31:53 managed-node2 NetworkManager[709]: [1737217913.2526] device (podman2): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Jan 18 11:31:53 managed-node2 NetworkManager[709]: [1737217913.2570] device (podman2): Activation: starting connection 'podman2' (ced5e191-5f1a-46ed-8291-387c8b116197) Jan 18 11:31:53 managed-node2 NetworkManager[709]: [1737217913.2573] device (podman2): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Jan 18 11:31:53 managed-node2 NetworkManager[709]: [1737217913.2592] device (podman2): state change: prepare -> config (reason 'none', managed-type: 'external') Jan 18 11:31:53 managed-node2 NetworkManager[709]: [1737217913.2595] device (podman2): state change: config -> ip-config (reason 'none', managed-type: 'external') Jan 18 11:31:53 managed-node2 NetworkManager[709]: [1737217913.2612] device (podman2): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Jan 18 11:31:53 managed-node2 NetworkManager[709]: [1737217913.2652] device (podman2): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Jan 18 11:31:53 managed-node2 NetworkManager[709]: [1737217913.2655] device (podman2): state change: secondaries -> activated (reason 'none', managed-type: 'external') Jan 18 11:31:53 managed-node2 NetworkManager[709]: [1737217913.2672] device (podman2): Activation: successful, device activated. Jan 18 11:31:53 managed-node2 podman[35561]: 2025-01-18 11:31:53.284091379 -0500 EST m=+0.088789796 container died ebaa7123818d7b22f46a8d9a19742b43fccbb493eeaf6330c7a2021249095307 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 18 11:31:53 managed-node2 systemd[1]: Started run-p35579-i35879.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run. 
░░ Subject: A start job for unit run-p35579-i35879.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-p35579-i35879.scope has finished successfully. ░░ ░░ The job identifier is 3276. Jan 18 11:31:53 managed-node2 systemd[1]: Started libpod-31a0fad686b3f8a96df9bec1ce38218427a7b55b3827fbc3f11a6c6256bbaeb3.scope - libcrun container. ░░ Subject: A start job for unit libpod-31a0fad686b3f8a96df9bec1ce38218427a7b55b3827fbc3f11a6c6256bbaeb3.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-31a0fad686b3f8a96df9bec1ce38218427a7b55b3827fbc3f11a6c6256bbaeb3.scope has finished successfully. ░░ ░░ The job identifier is 3282. Jan 18 11:31:53 managed-node2 systemd[1]: var-lib-containers-storage-overlay-f1dbbe6c0fb598c05b50f2c97c55a37b7cb307d3fa1b5af688b4e8201f175b72-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-f1dbbe6c0fb598c05b50f2c97c55a37b7cb307d3fa1b5af688b4e8201f175b72-merged.mount has successfully entered the 'dead' state. Jan 18 11:31:53 managed-node2 podman[35509]: 2025-01-18 11:31:53.863507177 -0500 EST m=+1.167809315 container init 31a0fad686b3f8a96df9bec1ce38218427a7b55b3827fbc3f11a6c6256bbaeb3 (image=localhost/podman-pause:5.3.1-1733097600, name=a217dea02776-infra, pod_id=a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Jan 18 11:31:53 managed-node2 podman[35509]: 2025-01-18 11:31:53.866449176 -0500 EST m=+1.170772112 container start 31a0fad686b3f8a96df9bec1ce38218427a7b55b3827fbc3f11a6c6256bbaeb3 (image=localhost/podman-pause:5.3.1-1733097600, name=a217dea02776-infra, pod_id=a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 18 11:31:53 managed-node2 systemd[1]: libpod-31a0fad686b3f8a96df9bec1ce38218427a7b55b3827fbc3f11a6c6256bbaeb3.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-31a0fad686b3f8a96df9bec1ce38218427a7b55b3827fbc3f11a6c6256bbaeb3.scope has successfully entered the 'dead' state. Jan 18 11:31:54 managed-node2 podman[35509]: 2025-01-18 11:31:54.442475962 -0500 EST m=+1.746778080 container died 31a0fad686b3f8a96df9bec1ce38218427a7b55b3827fbc3f11a6c6256bbaeb3 (image=localhost/podman-pause:5.3.1-1733097600, name=a217dea02776-infra, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 18 11:31:55 managed-node2 podman[35561]: 2025-01-18 11:31:55.673439255 -0500 EST m=+2.478137793 container cleanup ebaa7123818d7b22f46a8d9a19742b43fccbb493eeaf6330c7a2021249095307 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 18 11:31:55 managed-node2 kernel: podman2: port 1(veth0) entered disabled state Jan 18 11:31:55 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Jan 18 11:31:55 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Jan 18 11:31:55 managed-node2 kernel: podman2: port 1(veth0) entered disabled state Jan 18 11:31:55 managed-node2 systemd[1]: run-p35579-i35879.scope: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-p35579-i35879.scope has successfully entered the 'dead' state. Jan 18 11:31:55 managed-node2 NetworkManager[709]: [1737217915.7054] device (podman2): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Jan 18 11:31:55 managed-node2 systemd[1]: run-netns-netns\x2d8d85ee07\x2d973d\x2ddc50\x2d8011\x2d55a30378cf41.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2d8d85ee07\x2d973d\x2ddc50\x2d8011\x2d55a30378cf41.mount has successfully entered the 'dead' state. Jan 18 11:31:55 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-31a0fad686b3f8a96df9bec1ce38218427a7b55b3827fbc3f11a6c6256bbaeb3-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-31a0fad686b3f8a96df9bec1ce38218427a7b55b3827fbc3f11a6c6256bbaeb3-userdata-shm.mount has successfully entered the 'dead' state. Jan 18 11:31:55 managed-node2 systemd[1]: var-lib-containers-storage-overlay-cc18e647f3c0c280b4a52f2ebcc0bacc11186374de68b15e48d68c86119aa73d-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-cc18e647f3c0c280b4a52f2ebcc0bacc11186374de68b15e48d68c86119aa73d-merged.mount has successfully entered the 'dead' state. Jan 18 11:31:55 managed-node2 podman[35597]: 2025-01-18 11:31:55.773954339 -0500 EST m=+1.686521610 container cleanup 31a0fad686b3f8a96df9bec1ce38218427a7b55b3827fbc3f11a6c6256bbaeb3 (image=localhost/podman-pause:5.3.1-1733097600, name=a217dea02776-infra, pod_id=a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Jan 18 11:31:55 managed-node2 systemd[1]: var-lib-containers-storage-overlay-ee045950926b1e979554c028958a6f97be0fb1297c403876ed2a177339fcb7b8-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-ee045950926b1e979554c028958a6f97be0fb1297c403876ed2a177339fcb7b8-merged.mount has successfully entered the 'dead' state. Jan 18 11:31:55 managed-node2 quadlet-demo[35509]: [starting container 4b516baa98043afa93c6a79d4758790753b9566bfe5ea033225e996306507a8e: cannot get namespace path unless container 31a0fad686b3f8a96df9bec1ce38218427a7b55b3827fbc3f11a6c6256bbaeb3 is running: container is stopped] Jan 18 11:31:55 managed-node2 quadlet-demo[35509]: [starting container 4b516baa98043afa93c6a79d4758790753b9566bfe5ea033225e996306507a8e: cannot get namespace path unless container 31a0fad686b3f8a96df9bec1ce38218427a7b55b3827fbc3f11a6c6256bbaeb3 is running: container is stopped starting container 54ac665dc4b5db861fdaa07c016ad0171534108c182850b042590fb55742b41b: cannot get namespace path unless container 31a0fad686b3f8a96df9bec1ce38218427a7b55b3827fbc3f11a6c6256bbaeb3 is running: container is stopped] Jan 18 11:31:55 managed-node2 systemd[1]: quadlet-demo.service: Failed to parse MAINPID=0 field in notification message, ignoring: Numerical result out of range Jan 18 11:31:55 managed-node2 systemd[1]: Started quadlet-demo.service. 
░░ Subject: A start job for unit quadlet-demo.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo.service has finished successfully. ░░ ░░ The job identifier is 2934. Jan 18 11:31:55 managed-node2 quadlet-demo[35509]: Volumes: Jan 18 11:31:55 managed-node2 quadlet-demo[35509]: wp-pv-claim Jan 18 11:31:55 managed-node2 quadlet-demo[35509]: Pod: Jan 18 11:31:55 managed-node2 quadlet-demo[35509]: a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718 Jan 18 11:31:55 managed-node2 quadlet-demo[35509]: Containers: Jan 18 11:31:55 managed-node2 quadlet-demo[35509]: 4b516baa98043afa93c6a79d4758790753b9566bfe5ea033225e996306507a8e Jan 18 11:31:55 managed-node2 quadlet-demo[35509]: 54ac665dc4b5db861fdaa07c016ad0171534108c182850b042590fb55742b41b Jan 18 11:31:55 managed-node2 quadlet-demo[35509]: starting container 4b516baa98043afa93c6a79d4758790753b9566bfe5ea033225e996306507a8e: cannot get namespace path unless container 31a0fad686b3f8a96df9bec1ce38218427a7b55b3827fbc3f11a6c6256bbaeb3 is running: container is stopped Jan 18 11:31:55 managed-node2 quadlet-demo[35509]: starting container 54ac665dc4b5db861fdaa07c016ad0171534108c182850b042590fb55742b41b: cannot get namespace path unless container 31a0fad686b3f8a96df9bec1ce38218427a7b55b3827fbc3f11a6c6256bbaeb3 is running: container is stopped Jan 18 11:31:55 managed-node2 quadlet-demo[35509]: Error: failed to start 2 containers Jan 18 11:31:55 managed-node2 systemd[1]: quadlet-demo.service: Main process exited, code=exited, status=125/n/a ░░ Subject: Unit process exited ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ An ExecStart= process belonging to unit quadlet-demo.service has exited. ░░ ░░ The process' exit code is 'exited' and its exit status is 125. Jan 18 11:31:55 managed-node2 podman[35620]: 2025-01-18 11:31:55.939942255 -0500 EST m=+0.038666062 pod stop a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718 (image=, name=quadlet-demo) Jan 18 11:31:55 managed-node2 systemd[1]: Removed slice machine-libpod_pod_a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718.slice - cgroup machine-libpod_pod_a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718.slice. ░░ Subject: A stop job for unit machine-libpod_pod_a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718.slice has finished. ░░ ░░ The job identifier is 3289 and the job result is done. 
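The two "cannot get namespace path" errors earlier explain the status=125 exit above: in a pod started from a .kube quadlet, the workload containers join the infra container's namespaces, so once infra container 31a0fad... stopped, neither wordpress nor envoy could start, and the kube play invocation behind the unit reported "failed to start 2 containers". Hypothetical checks, and a recovery attempt once the underlying cause is fixed:

    # confirm the pod's state, then clear the failed state and retry
    podman pod ps
    systemctl reset-failed quadlet-demo.service
    systemctl restart quadlet-demo.service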
Jan 18 11:31:55 managed-node2 podman[35620]: 2025-01-18 11:31:55.993017214 -0500 EST m=+0.091740928 container remove 4b516baa98043afa93c6a79d4758790753b9566bfe5ea033225e996306507a8e (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 18 11:31:56 managed-node2 podman[35620]: 2025-01-18 11:31:56.012314821 -0500 EST m=+0.111038538 container remove 54ac665dc4b5db861fdaa07c016ad0171534108c182850b042590fb55742b41b (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 18 11:31:56 managed-node2 podman[35620]: 2025-01-18 11:31:56.035462304 -0500 EST m=+0.134186123 container remove 31a0fad686b3f8a96df9bec1ce38218427a7b55b3827fbc3f11a6c6256bbaeb3 (image=localhost/podman-pause:5.3.1-1733097600, name=a217dea02776-infra, pod_id=a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Jan 18 11:31:56 managed-node2 podman[35620]: 2025-01-18 11:31:56.0440827 -0500 EST m=+0.142806389 pod remove a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718 (image=, name=quadlet-demo) Jan 18 11:31:56 managed-node2 podman[35620]: 2025-01-18 11:31:56.120942771 -0500 EST m=+0.219666484 container remove ebaa7123818d7b22f46a8d9a19742b43fccbb493eeaf6330c7a2021249095307 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 18 11:31:56 managed-node2 quadlet-demo[35620]: Pods stopped: Jan 18 11:31:56 managed-node2 quadlet-demo[35620]: a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718 Jan 18 11:31:56 managed-node2 quadlet-demo[35620]: Pods removed: Jan 18 11:31:56 managed-node2 quadlet-demo[35620]: a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718 Jan 18 11:31:56 managed-node2 quadlet-demo[35620]: Secrets removed: Jan 18 11:31:56 managed-node2 quadlet-demo[35620]: Volumes removed: Jan 18 11:31:56 managed-node2 systemd[1]: quadlet-demo.service: Failed with result 'exit-code'. ░░ Subject: Unit failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit quadlet-demo.service has entered the 'failed' state with result 'exit-code'. Jan 18 11:31:56 managed-node2 python3.12[35760]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /etc/containers/systemd _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:31:56 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:31:56 managed-node2 python3.12[35892]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps -a _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:31:56 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:31:57 managed-node2 python3.12[36030]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:31:57 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:31:57 managed-node2 python3.12[36169]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod ps --ctr-ids --ctr-names --ctr-status _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:31:57 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:31:58 managed-node2 python3.12[36308]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail; systemctl list-units | grep quadlet _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:31:58 managed-node2 python3.12[36442]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:32:04 managed-node2 python3.12[36573]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:32:05 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. 
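The repeated ansible-get_url records above are the test polling the deployed app every few seconds; each attempt fetches https://localhost:8000 into /run/out with certificate checking disabled and a 10-second timeout. A rough shell equivalent of one probe (validate_certs=False maps to --insecure):

    # single probe of the demo endpoint, mirroring the get_url parameters
    curl --insecure --max-time 10 --output /run/out https://localhost:8000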
Jan 18 11:32:09 managed-node2 python3.12[36705]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:32:15 managed-node2 python3.12[36836]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:32:20 managed-node2 python3.12[36967]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:32:26 managed-node2 python3.12[37098]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:32:31 managed-node2 python3.12[37229]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:32:31 managed-node2 python3.12[37360]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None TASK [Check] ******************************************************************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:148 Saturday 18 January 2025 11:32:32 -0500 (0:00:00.468) 0:02:04.164 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "-a" ], "delta": "0:00:00.040074", "end": "2025-01-18 11:32:32.394207", "rc": 0, "start": "2025-01-18 
11:32:32.354133" } STDOUT: CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES TASK [Check pods] ************************************************************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:152 Saturday 18 January 2025 11:32:32 -0500 (0:00:00.429) 0:02:04.594 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "pod", "ps", "--ctr-ids", "--ctr-names", "--ctr-status" ], "delta": "0:00:00.032697", "end": "2025-01-18 11:32:32.808099", "failed_when_result": false, "rc": 0, "start": "2025-01-18 11:32:32.775402" } STDOUT: POD ID NAME STATUS CREATED INFRA ID IDS NAMES STATUS TASK [Check systemd] *********************************************************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:157 Saturday 18 January 2025 11:32:32 -0500 (0:00:00.411) 0:02:05.005 ****** ok: [managed-node2] => { "changed": false, "cmd": "set -euo pipefail; systemctl list-units --all | grep quadlet", "delta": "0:00:00.014330", "end": "2025-01-18 11:32:33.205861", "failed_when_result": false, "rc": 0, "start": "2025-01-18 11:32:33.191531" } STDOUT: quadlet-demo-mysql-volume.service loaded active exited quadlet-demo-mysql-volume.service ● quadlet-demo-mysql.service loaded failed failed quadlet-demo-mysql.service quadlet-demo-network.service loaded active exited quadlet-demo-network.service ● quadlet-demo.service loaded failed failed quadlet-demo.service TASK [LS] ********************************************************************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:165 Saturday 18 January 2025 11:32:33 -0500 (0:00:00.400) 0:02:05.405 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "ls", "-alrtF", "/etc/systemd/system" ], "delta": "0:00:00.004225", "end": "2025-01-18 11:32:33.594164", "failed_when_result": false, "rc": 0, "start": "2025-01-18 11:32:33.589939" } STDOUT: total 12 drwxr-xr-x. 5 root root 47 Jan 9 09:16 ../ drwxr-xr-x. 2 root root 32 Jan 9 09:16 getty.target.wants/ lrwxrwxrwx. 1 root root 43 Jan 9 09:16 dbus.service -> /usr/lib/systemd/system/dbus-broker.service lrwxrwxrwx. 1 root root 37 Jan 9 09:16 ctrl-alt-del.target -> /usr/lib/systemd/system/reboot.target drwxr-xr-x. 2 root root 48 Jan 9 09:17 network-online.target.wants/ lrwxrwxrwx. 1 root root 57 Jan 9 09:17 dbus-org.freedesktop.nm-dispatcher.service -> /usr/lib/systemd/system/NetworkManager-dispatcher.service drwxr-xr-x. 2 root root 76 Jan 9 09:17 timers.target.wants/ drwxr-xr-x. 2 root root 38 Jan 9 09:17 dev-virtio\x2dports-org.qemu.guest_agent.0.device.wants/ lrwxrwxrwx. 1 root root 41 Jan 9 09:20 default.target -> /usr/lib/systemd/system/multi-user.target drwxr-xr-x. 2 root root 31 Jan 9 09:24 remote-fs.target.wants/ drwxr-xr-x. 2 root root 119 Jan 9 09:25 cloud-init.target.wants/ drwxr-xr-x. 2 root root 4096 Jan 9 09:25 sysinit.target.wants/ drwxr-xr-x. 2 root root 113 Jan 18 11:28 sockets.target.wants/ lrwxrwxrwx. 1 root root 41 Jan 18 11:28 dbus-org.fedoraproject.FirewallD1.service -> /usr/lib/systemd/system/firewalld.service drwxr-xr-x. 11 root root 4096 Jan 18 11:28 ./ drwxr-xr-x. 
2 root root 4096 Jan 18 11:30 multi-user.target.wants/ TASK [Cleanup] ***************************************************************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:172 Saturday 18 January 2025 11:32:33 -0500 (0:00:00.385) 0:02:05.791 ****** included: fedora.linux_system_roles.podman for managed-node2 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Saturday 18 January 2025 11:32:33 -0500 (0:00:00.073) 0:02:05.864 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Saturday 18 January 2025 11:32:33 -0500 (0:00:00.049) 0:02:05.914 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Saturday 18 January 2025 11:32:33 -0500 (0:00:00.035) 0:02:05.950 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Saturday 18 January 2025 11:32:33 -0500 (0:00:00.031) 0:02:05.982 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Saturday 18 January 2025 11:32:33 -0500 (0:00:00.030) 0:02:06.012 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Saturday 18 January 2025 11:32:33 -0500 (0:00:00.083) 0:02:06.096 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Saturday 18 January 2025 11:32:34 -0500 (0:00:00.042) 0:02:06.138 ****** ok: [managed-node2] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" 
} skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Saturday 18 January 2025 11:32:34 -0500 (0:00:00.074) 0:02:06.212 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Saturday 18 January 2025 11:32:34 -0500 (0:00:00.867) 0:02:07.080 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Saturday 18 January 2025 11:32:34 -0500 (0:00:00.035) 0:02:07.116 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Saturday 18 January 2025 11:32:35 -0500 (0:00:00.060) 0:02:07.176 ****** skipping: [managed-node2] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Saturday 18 January 2025 11:32:35 -0500 (0:00:00.070) 0:02:07.246 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Saturday 18 January 2025 11:32:35 -0500 (0:00:00.052) 0:02:07.299 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Saturday 18 January 2025 11:32:35 -0500 (0:00:00.050) 0:02:07.349 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.024558", "end": "2025-01-18 11:32:35.580563", "rc": 0, "start": "2025-01-18 11:32:35.556005" } STDOUT: podman version 5.3.1 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Saturday 18 January 2025 11:32:35 -0500 (0:00:00.456) 0:02:07.805 ****** ok: [managed-node2] => { "ansible_facts": { "podman_version": "5.3.1" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Saturday 18 January 2025 11:32:35 -0500 (0:00:00.053) 0:02:07.859 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Saturday 18 January 2025 11:32:35 -0500 (0:00:00.049) 0:02:07.909 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Saturday 18 January 2025 11:32:35 -0500 (0:00:00.059) 0:02:07.968 ****** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Saturday 18 January 2025 11:32:35 -0500 (0:00:00.062) 0:02:08.031 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Saturday 18 January 2025 11:32:35 -0500 (0:00:00.084) 0:02:08.115 ****** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Saturday 18 
January 2025 11:32:36 -0500 (0:00:00.083) 0:02:08.199 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:32:36 -0500 (0:00:00.203) 0:02:08.402 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:32:36 -0500 (0:00:00.057) 0:02:08.459 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:32:36 -0500 (0:00:00.059) 0:02:08.519 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:32:36 -0500 (0:00:00.068) 0:02:08.588 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1737217697.4254181, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217689.5844104, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:32:36 -0500 (0:00:00.415) 0:02:09.003 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:32:36 -0500 (0:00:00.052) 0:02:09.056 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:32:36 -0500 (0:00:00.053) 0:02:09.110 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:32:37 -0500 (0:00:00.051) 0:02:09.161 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:32:37 -0500 (0:00:00.054) 0:02:09.215 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:32:37 -0500 (0:00:00.051) 0:02:09.267 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:32:37 -0500 (0:00:00.039) 0:02:09.306 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:32:37 -0500 (0:00:00.051) 0:02:09.357 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Saturday 18 January 2025 11:32:37 -0500 (0:00:00.060) 0:02:09.418 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124 Saturday 18 January 2025 11:32:37 -0500 (0:00:00.045) 0:02:09.463 ****** included: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 18 January 2025 11:32:37 -0500 (0:00:00.061) 0:02:09.525 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 18 January 2025 11:32:37 -0500 (0:00:00.030) 0:02:09.555 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127 Saturday 18 January 2025 11:32:37 -0500 (0:00:00.030) 0:02:09.586 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 18 January 2025 11:32:37 -0500 (0:00:00.058) 0:02:09.645 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 18 January 2025 11:32:37 -0500 (0:00:00.035) 0:02:09.680 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130 Saturday 18 January 2025 11:32:37 -0500 (0:00:00.080) 0:02:09.761 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5 Saturday 18 January 2025 11:32:37 -0500 (0:00:00.070) 0:02:09.831 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Saturday 18 January 2025 11:32:37 -0500 (0:00:00.031) 0:02:09.862 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } 
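The three config handlers above (containers.conf.d, registries.conf.d, and storage.conf) are all skipped in this run because their driving variables are empty, as each false_condition shows. Below is a minimal sketch of a playbook that would exercise them; the variable names are taken verbatim from the skip conditions, while the dict shapes are an assumption that mirrors the TOML sections of containers.conf, registries.conf, and storage.conf (confirm against the role's README before relying on them):

    - hosts: all
      vars:
        # Each non-empty dict is written to the drop-in path reported by
        # "Set config file paths" above, e.g.
        # /etc/containers/containers.conf.d/50-systemroles.conf
        podman_containers_conf:
          containers:
            log_driver: journald        # assumed to render as [containers] log_driver = "journald"
        podman_registries_conf:
          unqualified-search-registries:
            - registry.fedoraproject.org
        podman_storage_conf:
          storage:
            driver: overlay             # assumed to render as [storage] driver = "overlay"
      roles:
        - fedora.linux_system_roles.podman

With all three left empty, as in this test, the role skips the "Ensure ... exists" and "Update ... config file" tasks and touches nothing under /etc/containers.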
TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133 Saturday 18 January 2025 11:32:37 -0500 (0:00:00.031) 0:02:09.894 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Saturday 18 January 2025 11:32:37 -0500 (0:00:00.061) 0:02:09.956 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Saturday 18 January 2025 11:32:37 -0500 (0:00:00.030) 0:02:09.986 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Saturday 18 January 2025 11:32:37 -0500 (0:00:00.032) 0:02:10.019 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Saturday 18 January 2025 11:32:37 -0500 (0:00:00.033) 0:02:10.053 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139 Saturday 18 January 2025 11:32:37 -0500 (0:00:00.049) 0:02:10.103 ****** included: fedora.linux_system_roles.firewall for managed-node2 TASK [fedora.linux_system_roles.firewall : Setup firewalld] ******************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2 Saturday 18 January 2025 11:32:38 -0500 (0:00:00.122) 0:02:10.225 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node2 TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2 Saturday 18 January 2025 11:32:38 -0500 (0:00:00.067) 0:02:10.293 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if system is ostree] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10 Saturday 18 January 2025 11:32:38 -0500 
(0:00:00.037) 0:02:10.330 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15 Saturday 18 January 2025 11:32:38 -0500 (0:00:00.029) 0:02:10.360 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22 Saturday 18 January 2025 11:32:38 -0500 (0:00:00.030) 0:02:10.390 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:27 Saturday 18 January 2025 11:32:38 -0500 (0:00:00.037) 0:02:10.428 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Install firewalld] ****************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31 Saturday 18 January 2025 11:32:38 -0500 (0:00:00.045) 0:02:10.473 ****** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: firewalld TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:43 Saturday 18 January 2025 11:32:39 -0500 (0:00:00.801) 0:02:11.275 ****** skipping: [managed-node2] => { "false_condition": "__firewall_is_transactional | d(false)" } TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:48 Saturday 18 January 2025 11:32:39 -0500 (0:00:00.073) 0:02:11.348 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:53 Saturday 18 January 2025 11:32:39 -0500 (0:00:00.030) 0:02:11.379 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Collect service facts] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5 Saturday 18 January 2025 11:32:39 -0500 (0:00:00.030) 0:02:11.409 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "skip_reason": "Conditional result was False" } 
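From this point the podman role delegates port opening to fedora.linux_system_roles.firewall. firewalld is already installed ("Nothing to do") and the host is not transactional, so the setup tasks are no-ops; the real work is the two-port "Configure firewall" loop further below. The driving variable, reconstructed from those loop items, looks like the following sketch (podman_firewall is the podman role's documented pass-through to the firewall role; treat the exact name as an assumption if your role version differs):

    podman_firewall:
      - port: 8000/tcp      # first item in the "Configure firewall" loop below
        state: enabled
      - port: 9000/tcp      # second item
        state: enabled

Each item is roughly equivalent to firewall-cmd --permanent --add-port=<port>; in this run both ports were already open, so the loop reports changed: false.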
TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:9 Saturday 18 January 2025 11:32:39 -0500 (0:00:00.028) 0:02:11.438 ****** skipping: [managed-node2] => (item=nftables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "nftables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=iptables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "iptables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=ufw) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "ufw", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:22 Saturday 18 January 2025 11:32:39 -0500 (0:00:00.051) 0:02:11.489 ****** ok: [managed-node2] => { "changed": false, "name": "firewalld", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2025-01-18 11:28:23 EST", "ActiveEnterTimestampMonotonic": "296477974", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "dbus.socket basic.target sysinit.target polkit.service dbus-broker.service system.slice", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-01-18 11:28:22 EST", "AssertTimestampMonotonic": "295763956", "Before": "multi-user.target shutdown.target network-pre.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "646644000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "yes", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-01-18 11:28:22 EST", "ConditionTimestampMonotonic": "295763953", "ConfigurationDirectoryMode": "0755", "Conflicts": "ipset.service iptables.service ip6tables.service ebtables.service shutdown.target", "ControlGroup": "/system.slice/firewalld.service", "ControlGroupId": "4516", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", 
"DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DeviceAllow": "char-rtc r", "DevicePolicy": "closed", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EffectiveCPUs": "0-1", "EffectiveMemoryHigh": "3698241536", "EffectiveMemoryMax": "3698241536", "EffectiveMemoryNodes": "0", "EffectiveTasksMax": "22365", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestamp": "Sat 2025-01-18 11:28:22 EST", "ExecMainHandoffTimestampMonotonic": "295794898", "ExecMainPID": "10780", "ExecMainStartTimestamp": "Sat 2025-01-18 11:28:22 EST", "ExecMainStartTimestampMonotonic": "295767198", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2025-01-18 11:28:22 EST", "InactiveExitTimestampMonotonic": "295767914", "InvocationID": "570bf0d320324c45b6e7e82fd441da39", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", 
"LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "10780", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3105501184", "MemoryCurrent": "34361344", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "34873344", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "yes", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectControlGroupsEx": "yes", "ProtectHome": "yes", "ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "yes", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target system.slice dbus.socket", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", 
"StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2025-01-18 11:31:51 EST", "StateChangeTimestampMonotonic": "505041115", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28 Saturday 18 January 2025 11:32:39 -0500 (0:00:00.593) 0:02:12.082 ****** ok: [managed-node2] => { "changed": false, "enabled": true, "name": "firewalld", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2025-01-18 11:28:23 EST", "ActiveEnterTimestampMonotonic": "296477974", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "dbus.socket basic.target sysinit.target polkit.service dbus-broker.service system.slice", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-01-18 11:28:22 EST", "AssertTimestampMonotonic": "295763956", "Before": "multi-user.target shutdown.target network-pre.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "646644000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "yes", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-01-18 11:28:22 EST", "ConditionTimestampMonotonic": "295763953", "ConfigurationDirectoryMode": "0755", "Conflicts": "ipset.service iptables.service 
ip6tables.service ebtables.service shutdown.target", "ControlGroup": "/system.slice/firewalld.service", "ControlGroupId": "4516", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DeviceAllow": "char-rtc r", "DevicePolicy": "closed", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EffectiveCPUs": "0-1", "EffectiveMemoryHigh": "3698241536", "EffectiveMemoryMax": "3698241536", "EffectiveMemoryNodes": "0", "EffectiveTasksMax": "22365", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestamp": "Sat 2025-01-18 11:28:22 EST", "ExecMainHandoffTimestampMonotonic": "295794898", "ExecMainPID": "10780", "ExecMainStartTimestamp": "Sat 2025-01-18 11:28:22 EST", "ExecMainStartTimestampMonotonic": "295767198", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2025-01-18 11:28:22 EST", "InactiveExitTimestampMonotonic": "295767914", "InvocationID": "570bf0d320324c45b6e7e82fd441da39", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", 
"LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "10780", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3112542208", "MemoryCurrent": "34361344", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "34873344", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "yes", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectControlGroupsEx": "yes", "ProtectHome": "yes", "ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "yes", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target system.slice dbus.socket", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "null", 
"StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2025-01-18 11:31:51 EST", "StateChangeTimestampMonotonic": "505041115", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:34 Saturday 18 January 2025 11:32:40 -0500 (0:00:00.586) 0:02:12.669 ****** ok: [managed-node2] => { "ansible_facts": { "__firewall_previous_replaced": false, "__firewall_python_cmd": "/usr/bin/python3.12", "__firewall_report_changed": true }, "changed": false } TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:43 Saturday 18 January 2025 11:32:40 -0500 (0:00:00.066) 0:02:12.736 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:55 Saturday 18 January 2025 11:32:40 -0500 (0:00:00.050) 0:02:12.787 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Configure firewall] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71 Saturday 18 January 2025 11:32:40 -0500 (0:00:00.050) 0:02:12.837 ****** ok: [managed-node2] => (item={'port': '8000/tcp', 'state': 'enabled'}) => { "__firewall_changed": false, "ansible_loop_var": "item", "changed": false, "item": { "port": "8000/tcp", "state": "enabled" } } ok: [managed-node2] => (item={'port': '9000/tcp', 'state': 'enabled'}) => { "__firewall_changed": false, "ansible_loop_var": "item", "changed": false, "item": { "port": "9000/tcp", "state": "enabled" } } TASK 
[fedora.linux_system_roles.firewall : Gather firewall config information] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:120 Saturday 18 January 2025 11:32:41 -0500 (0:00:01.058) 0:02:13.896 ****** skipping: [managed-node2] => (item={'port': '8000/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall | length == 1", "item": { "port": "8000/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item={'port': '9000/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall | length == 1", "item": { "port": "9000/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:130 Saturday 18 January 2025 11:32:41 -0500 (0:00:00.046) 0:02:13.942 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall | length == 1", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:139 Saturday 18 January 2025 11:32:41 -0500 (0:00:00.032) 0:02:13.975 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:144 Saturday 18 January 2025 11:32:41 -0500 (0:00:00.032) 0:02:14.007 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:153 Saturday 18 January 2025 11:32:41 -0500 (0:00:00.031) 0:02:14.038 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Calculate what has changed] ********* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:163 Saturday 18 January 2025 11:32:41 -0500 (0:00:00.029) 0:02:14.068 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Show diffs] ************************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:169 Saturday 18 January 2025 11:32:41 -0500 (0:00:00.028) 0:02:14.096 ****** skipping: [managed-node2] => { "false_condition": "__firewall_previous_replaced | bool" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146 Saturday 18 
January 2025 11:32:42 -0500 (0:00:00.043) 0:02:14.140 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153 Saturday 18 January 2025 11:32:42 -0500 (0:00:00.031) 0:02:14.171 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157 Saturday 18 January 2025 11:32:42 -0500 (0:00:00.029) 0:02:14.201 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166 Saturday 18 January 2025 11:32:42 -0500 (0:00:00.027) 0:02:14.228 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175 Saturday 18 January 2025 11:32:42 -0500 (0:00:00.068) 0:02:14.297 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Saturday 18 January 2025 11:32:42 -0500 (0:00:00.091) 0:02:14.388 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 18 January 2025 11:32:42 -0500 (0:00:00.035) 0:02:14.423 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:32:42 -0500 (0:00:00.053) 0:02:14.477 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not 
exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:32:42 -0500 (0:00:00.033) 0:02:14.510 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:32:42 -0500 (0:00:00.032) 0:02:14.543 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:32:42 -0500 (0:00:00.041) 0:02:14.584 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:32:42 -0500 (0:00:00.030) 0:02:14.615 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:32:42 -0500 (0:00:00.030) 0:02:14.645 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:32:42 -0500 (0:00:00.050) 0:02:14.695 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:32:42 -0500 (0:00:00.032) 0:02:14.728 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:32:42 -0500 (0:00:00.030) 0:02:14.758 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:32:42 
-0500 (0:00:00.029) 0:02:14.788 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:32:42 -0500 (0:00:00.032) 0:02:14.820 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:32:42 -0500 (0:00:00.031) 0:02:14.851 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14 Saturday 18 January 2025 11:32:42 -0500 (0:00:00.031) 0:02:14.883 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20 Saturday 18 January 2025 11:32:42 -0500 (0:00:00.035) 0:02:14.919 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:32:42 -0500 (0:00:00.054) 0:02:14.974 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:32:42 -0500 (0:00:00.070) 0:02:15.044 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:32:42 -0500 (0:00:00.029) 0:02:15.074 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25 Saturday 18 January 2025 11:32:42 -0500 (0:00:00.030) 0:02:15.104 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41 Saturday 18 January 2025 11:32:43 -0500 (0:00:00.029) 0:02:15.133 ****** changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Saturday 18 January 2025 11:32:43 -0500 (0:00:00.437) 0:02:15.571 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 18 January 2025 11:32:43 -0500 (0:00:00.034) 0:02:15.606 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:32:43 -0500 (0:00:00.053) 0:02:15.659 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:32:43 -0500 (0:00:00.034) 0:02:15.694 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:32:43 -0500 (0:00:00.056) 0:02:15.750 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:32:43 -0500 (0:00:00.044) 0:02:15.795 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:32:43 -0500 (0:00:00.030) 0:02:15.825 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:32:43 -0500 (0:00:00.031) 0:02:15.856 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:32:43 -0500 (0:00:00.030) 0:02:15.886 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:32:43 -0500 (0:00:00.031) 0:02:15.917 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:32:43 -0500 (0:00:00.030) 0:02:15.948 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:32:43 -0500 (0:00:00.030) 0:02:15.978 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:32:43 -0500 (0:00:00.030) 0:02:16.009 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:32:43 -0500 (0:00:00.031) 0:02:16.040 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14 Saturday 18 January 2025 11:32:43 -0500 (0:00:00.029) 0:02:16.069 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20 Saturday 18 January 2025 11:32:43 -0500 
(0:00:00.040) 0:02:16.109 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:32:44 -0500 (0:00:00.098) 0:02:16.208 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:32:44 -0500 (0:00:00.029) 0:02:16.238 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:32:44 -0500 (0:00:00.029) 0:02:16.267 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25 Saturday 18 January 2025 11:32:44 -0500 (0:00:00.028) 0:02:16.295 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41 Saturday 18 January 2025 11:32:44 -0500 (0:00:00.029) 0:02:16.325 ****** changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Saturday 18 January 2025 11:32:44 -0500 (0:00:00.427) 0:02:16.752 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 18 January 2025 11:32:44 -0500 (0:00:00.057) 0:02:16.810 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:32:44 -0500 (0:00:00.054) 0:02:16.864 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was 
False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:32:44 -0500 (0:00:00.034) 0:02:16.898 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:32:44 -0500 (0:00:00.033) 0:02:16.932 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:32:44 -0500 (0:00:00.042) 0:02:16.974 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:32:44 -0500 (0:00:00.030) 0:02:17.005 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:32:44 -0500 (0:00:00.032) 0:02:17.038 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:32:44 -0500 (0:00:00.030) 0:02:17.068 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:32:44 -0500 (0:00:00.031) 0:02:17.099 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:32:45 -0500 (0:00:00.031) 0:02:17.131 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Saturday 18 January 2025 11:32:45 -0500 (0:00:00.031) 0:02:17.163 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Saturday 18 January 2025 11:32:45 -0500 (0:00:00.030) 0:02:17.193 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Saturday 18 January 2025 11:32:45 -0500 (0:00:00.030) 0:02:17.224 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set variables part 2] *****************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14
Saturday 18 January 2025 11:32:45 -0500 (0:00:00.029) 0:02:17.253 ******
ok: [managed-node2] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false }

TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20
Saturday 18 January 2025 11:32:45 -0500 (0:00:00.038) 0:02:17.292 ******
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2

TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 18 January 2025 11:32:45 -0500 (0:00:00.097) 0:02:17.389 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 18 January 2025 11:32:45 -0500 (0:00:00.029) 0:02:17.419 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 18 January 2025 11:32:45 -0500 (0:00:00.030) 0:02:17.449 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" }
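The linger tasks above are all skipped because this run manages root (__podman_rootless is false). For a rootless user, lingering is what keeps that user's systemd instance, and therefore the quadlet services, running after logout. A minimal manual equivalent, assuming a hypothetical rootless user named quadlet_user (the name is illustrative, not from this run):

    # keep the user's services running after the user logs out
    loginctl enable-linger quadlet_user
    # cancel lingering later, once no quadlets remain for that user
    loginctl disable-linger quadlet_user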
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25
Saturday 18 January 2025 11:32:45 -0500 (0:00:00.028) 0:02:17.478 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Manage each secret] *******************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41
Saturday 18 January 2025 11:32:45 -0500 (0:00:00.030) 0:02:17.508 ******
changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true }

TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] *****
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182
Saturday 18 January 2025 11:32:45 -0500 (0:00:00.431) 0:02:17.939 ******
skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189
Saturday 18 January 2025 11:32:45 -0500 (0:00:00.028) 0:02:17.968 ******
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log))
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log))
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log))
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log))
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log))
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log))

TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Saturday 18 January 2025 11:32:45 -0500 (0:00:00.147) 0:02:18.115 ******
ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "quadlet-demo.kube", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Unit]\nRequires=quadlet-demo-mysql.service\nAfter=quadlet-demo-mysql.service\n\n[Kube]\n# Point to the yaml file in the same directory\nYaml=quadlet-demo.yml\n# Use the quadlet-demo network\nNetwork=quadlet-demo.network\n# Publish the envoy proxy data port\nPublishPort=8000:8080\n# Publish the envoy proxy admin port\nPublishPort=9000:9901\n# Use the envoy proxy config map in the same directory\nConfigMap=envoy-proxy-configmap.yml", "__podman_quadlet_template_src": "" }, "changed": false }
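For readability, the escaped __podman_quadlet_str fact above decodes to the quadlet-demo.kube unit the role manages; expanding the \n sequences (nothing added beyond what the log contains) gives:

    [Install]
    WantedBy=default.target

    [Unit]
    Requires=quadlet-demo-mysql.service
    After=quadlet-demo-mysql.service

    [Kube]
    # Point to the yaml file in the same directory
    Yaml=quadlet-demo.yml
    # Use the quadlet-demo network
    Network=quadlet-demo.network
    # Publish the envoy proxy data port
    PublishPort=8000:8080
    # Publish the envoy proxy admin port
    PublishPort=9000:9901
    # Use the envoy proxy config map in the same directory
    ConfigMap=envoy-proxy-configmap.yml

Note that the very next task sets __podman_state to "absent", so this pass is removing the unit, not deploying it.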
"ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 18 January 2025 11:32:46 -0500 (0:00:00.037) 0:02:18.196 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 18 January 2025 11:32:46 -0500 (0:00:00.032) 0:02:18.228 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo", "__podman_quadlet_type": "kube", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 18 January 2025 11:32:46 -0500 (0:00:00.044) 0:02:18.273 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:32:46 -0500 (0:00:00.056) 0:02:18.329 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:32:46 -0500 (0:00:00.034) 0:02:18.364 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:32:46 -0500 (0:00:00.034) 0:02:18.399 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:32:46 -0500 (0:00:00.040) 0:02:18.440 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1737217697.4254181, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217689.5844104, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": 
false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:32:46 -0500 (0:00:00.385) 0:02:18.825 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:32:46 -0500 (0:00:00.092) 0:02:18.918 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:32:46 -0500 (0:00:00.031) 0:02:18.950 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:32:46 -0500 (0:00:00.032) 0:02:18.982 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:32:46 -0500 (0:00:00.031) 0:02:19.014 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:32:46 -0500 (0:00:00.031) 0:02:19.046 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:32:46 -0500 (0:00:00.031) 0:02:19.077 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:32:46 -0500 (0:00:00.032) 0:02:19.109 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 18 January 2025 11:32:47 -0500 (0:00:00.031) 0:02:19.141 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": [ "quadlet-demo.yml" ], "__podman_service_name": "quadlet-demo.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 18 January 2025 11:32:47 -0500 (0:00:00.053) 0:02:19.194 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 18 January 2025 11:32:47 -0500 (0:00:00.032) 0:02:19.226 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 18 January 2025 11:32:47 -0500 (0:00:00.029) 0:02:19.255 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.kube", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 18 January 2025 11:32:47 -0500 (0:00:00.073) 0:02:19.328 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 18 January 2025 11:32:47 -0500 (0:00:00.038) 0:02:19.367 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 18 January 2025 11:32:47 -0500 (0:00:00.074) 0:02:19.441 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 18 January 2025 11:32:47 -0500 (0:00:00.030) 0:02:19.472 ****** changed: [managed-node2] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-demo.service", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2025-01-18 11:31:55 EST", "ActiveEnterTimestampMonotonic": "509081963", "ActiveExitTimestamp": "Sat 2025-01-18 11:31:55 EST", "ActiveExitTimestampMonotonic": "509090152", "ActiveState": "failed", "After": "systemd-journald.socket sysinit.target basic.target quadlet-demo-mysql.service system.slice network-online.target -.mount quadlet-demo-network.service", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-01-18 11:31:52 EST", "AssertTimestampMonotonic": "505865709", "Before": "multi-user.target shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "501745000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-01-18 11:31:52 EST", "ConditionTimestampMonotonic": "505865705", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "8650", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-demo.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698241536", "EffectiveMemoryMax": "3698241536", "EffectiveTasksMax": "22365", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo.service", "ExecMainCode": "1", "ExecMainExitTimestamp": "Sat 2025-01-18 11:31:55 EST", "ExecMainExitTimestampMonotonic": "509088784", "ExecMainHandoffTimestamp": "Sat 2025-01-18 11:31:52 EST", "ExecMainHandoffTimestampMonotonic": "505879280", "ExecMainPID": "35509", "ExecMainStartTimestamp": "Sat 2025-01-18 11:31:52 EST", "ExecMainStartTimestampMonotonic": "505866835", "ExecMainStatus": "125", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 
8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[Sat 2025-01-18 11:31:52 EST] ; stop_time=[Sat 2025-01-18 11:31:55 EST] ; pid=35509 ; code=exited ; status=125 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; flags= ; start_time=[Sat 2025-01-18 11:31:52 EST] ; stop_time=[Sat 2025-01-18 11:31:55 EST] ; pid=35509 ; code=exited ; status=125 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[Sat 2025-01-18 11:31:55 EST] ; stop_time=[Sat 2025-01-18 11:31:56 EST] ; pid=35620 ; code=exited ; status=0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; flags= ; start_time=[Sat 2025-01-18 11:31:55 EST] ; stop_time=[Sat 2025-01-18 11:31:56 EST] ; pid=35620 ; code=exited ; status=0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Sat 2025-01-18 11:31:56 EST", "InactiveEnterTimestampMonotonic": "509330636", "InactiveExitTimestamp": "Sat 2025-01-18 11:31:52 EST", "InactiveExitTimestampMonotonic": "505868499", "InvocationID": "e60abcd797de40639d08070bc824ac87", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", 
"LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3126906880", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "55889920", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "quadlet-demo-mysql.service -.mount quadlet-demo-network.service system.slice sysinit.target", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "exit-code", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo.kube", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": 
"infinity", "StateChangeTimestamp": "Sat 2025-01-18 11:31:56 EST", "StateChangeTimestampMonotonic": "509330636", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "failed", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 18 January 2025 11:32:48 -0500 (0:00:00.782) 0:02:20.254 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1737217910.9766614, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7", "ctime": 1737217910.9786615, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 436208481, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1737217910.702661, "nlink": 1, "path": "/etc/containers/systemd/quadlet-demo.kube", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 456, "uid": 0, "version": "2432187541", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 18 January 2025 11:32:48 -0500 (0:00:00.569) 0:02:20.824 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 18 January 2025 11:32:48 -0500 (0:00:00.075) 0:02:20.900 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 18 January 2025 11:32:49 -0500 (0:00:00.473) 0:02:21.373 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": 
false }

TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] **************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44
Saturday 18 January 2025 11:32:49 -0500 (0:00:00.071) 0:02:21.445 ******
skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : Reset raw variable] *******************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52
Saturday 18 January 2025 11:32:49 -0500 (0:00:00.035) 0:02:21.481 ******
ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false }

TASK [fedora.linux_system_roles.podman : Remove quadlet file] ******************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42
Saturday 18 January 2025 11:32:49 -0500 (0:00:00.049) 0:02:21.531 ******
changed: [managed-node2] => { "changed": true, "path": "/etc/containers/systemd/quadlet-demo.kube", "state": "absent" }

TASK [fedora.linux_system_roles.podman : Refresh systemd] **********************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48
Saturday 18 January 2025 11:32:49 -0500 (0:00:00.386) 0:02:21.917 ******
ok: [managed-node2] => { "changed": false, "name": null, "status": {} }

TASK [fedora.linux_system_roles.podman : Remove managed resource] **************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58
Saturday 18 January 2025 11:32:50 -0500 (0:00:00.744) 0:02:22.662 ******
skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : Remove volumes] ***********************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99
Saturday 18 January 2025 11:32:50 -0500 (0:00:00.034) 0:02:22.696 ******
skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] *********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116
Saturday 18 January 2025 11:32:50 -0500 (0:00:00.043) 0:02:22.740 ******
ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false }

TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120
Saturday 18 January 2025 11:32:50 -0500 (0:00:00.032) 0:02:22.772 ******
changed: [managed-node2] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:01.734792", "end": "2025-01-18 11:32:52.692938", "rc": 0, "start": "2025-01-18 11:32:50.958146" }

STDOUT:
00e7ed281fae0a457d309c3c4887646b98c1226f1ff60de69ef58bf1e343c789
9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f
dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5
fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b
5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d
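The five image IDs above are the images the prune removed. Reconstructed from the cmd array in that result, the command the role ran was:

    podman image prune --all -f
    # --all  remove every image not referenced by a container, not only dangling layers
    # -f     do not prompt for confirmation

The "For testing and debugging" tasks that follow then list what remains on the host (images, volumes, containers, networks, secrets, pods, services).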
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131
Saturday 18 January 2025 11:32:52 -0500 (0:00:02.123) 0:02:24.895 ******
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2

TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 18 January 2025 11:32:52 -0500 (0:00:00.055) 0:02:24.950 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 18 January 2025 11:32:52 -0500 (0:00:00.029) 0:02:24.980 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 18 January 2025 11:32:52 -0500 (0:00:00.032) 0:02:25.013 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : For testing and debugging - images] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141
Saturday 18 January 2025 11:32:52 -0500 (0:00:00.051) 0:02:25.064 ******
ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.035076", "end": "2025-01-18 11:32:53.301682", "rc": 0, "start": "2025-01-18 11:32:53.266606" }

TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150
Saturday 18 January 2025 11:32:53 -0500 (0:00:00.446) 0:02:25.511 ******
ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.032519", "end": "2025-01-18 11:32:53.740218", "rc": 0, "start": "2025-01-18 11:32:53.707699" }

STDOUT:
local systemd-quadlet-demo-mysql
local wp-pv-claim
local envoy-proxy-config
local envoy-certificates

TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159
Saturday 18 January 2025 11:32:53 -0500 (0:00:00.429) 0:02:25.940 ******
ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.033999", "end": "2025-01-18 11:32:54.159501", "rc": 0, "start": "2025-01-18 11:32:54.125502" }

TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] ***
task path:
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 18 January 2025 11:32:54 -0500 (0:00:00.415) 0:02:26.355 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.031673", "end": "2025-01-18 11:32:54.572560", "rc": 0, "start": "2025-01-18 11:32:54.540887" } STDOUT: podman podman-default-kube-network systemd-quadlet-demo TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 18 January 2025 11:32:54 -0500 (0:00:00.417) 0:02:26.773 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 18 January 2025 11:32:55 -0500 (0:00:00.451) 0:02:27.224 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 18 January 2025 11:32:55 -0500 (0:00:00.501) 0:02:27.726 ****** ok: [managed-node2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { 
"name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": 
"dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": 
"static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": 
"pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-demo-mysql-volume.service": { "name": "quadlet-demo-mysql-volume.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-demo-mysql.service": { "name": "quadlet-demo-mysql.service", "source": "systemd", "state": "stopped", "status": "failed" }, "quadlet-demo-network.service": { "name": "quadlet-demo-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-demo.service": { "name": "quadlet-demo.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, 
"selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, 
"systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": 
"systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": 
"systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": 
"systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@3001.service": { "name": "user-runtime-dir@3001.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@3001.service": { "name": "user@3001.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 18 January 2025 11:32:57 -0500 (0:00:02.361) 0:02:30.087 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 18 January 2025 11:32:58 -0500 (0:00:00.053) 0:02:30.141 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "---\napiVersion: v1\nkind: PersistentVolumeClaim\nmetadata:\n name: wp-pv-claim\n labels:\n app: wordpress\nspec:\n accessModes:\n - ReadWriteOnce\n resources:\n requests:\n storage: 20Gi\n---\napiVersion: v1\nkind: Pod\nmetadata:\n name: quadlet-demo\nspec:\n containers:\n - name: wordpress\n image: quay.io/linux-system-roles/wordpress:4.8-apache\n env:\n - 
name: WORDPRESS_DB_HOST\n value: quadlet-demo-mysql\n - name: WORDPRESS_DB_PASSWORD\n valueFrom:\n secretKeyRef:\n name: mysql-root-password-kube\n key: password\n volumeMounts:\n - name: wordpress-persistent-storage\n mountPath: /var/www/html\n resources:\n requests:\n memory: \"64Mi\"\n cpu: \"250m\"\n limits:\n memory: \"128Mi\"\n cpu: \"500m\"\n - name: envoy\n image: quay.io/linux-system-roles/envoyproxy:v1.25.0\n volumeMounts:\n - name: config-volume\n mountPath: /etc/envoy\n - name: certificates\n mountPath: /etc/envoy-certificates\n env:\n - name: ENVOY_UID\n value: \"0\"\n resources:\n requests:\n memory: \"64Mi\"\n cpu: \"250m\"\n limits:\n memory: \"128Mi\"\n cpu: \"500m\"\n volumes:\n - name: config-volume\n configMap:\n name: envoy-proxy-config\n - name: certificates\n secret:\n secretName: envoy-certificates\n - name: wordpress-persistent-storage\n persistentVolumeClaim:\n claimName: wp-pv-claim\n - name: www # not used - for testing hostpath\n hostPath:\n path: /tmp/httpd3\n - name: create # not used - for testing hostpath\n hostPath:\n path: /tmp/httpd3-create\n", "__podman_quadlet_template_src": "quadlet-demo.yml.j2" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 18 January 2025 11:32:58 -0500 (0:00:00.156) 0:02:30.298 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 18 January 2025 11:32:58 -0500 (0:00:00.051) 0:02:30.349 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_str", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 18 January 2025 11:32:58 -0500 (0:00:00.045) 0:02:30.395 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo", "__podman_quadlet_type": "yml", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 18 January 2025 11:32:58 -0500 (0:00:00.079) 0:02:30.475 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:32:58 -0500 (0:00:00.065) 0:02:30.540 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:32:58 -0500 (0:00:00.038) 0:02:30.579 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:32:58 -0500 (0:00:00.040) 0:02:30.619 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:32:58 -0500 (0:00:00.062) 0:02:30.682 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1737217697.4254181, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217689.5844104, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:32:58 -0500 (0:00:00.415) 0:02:31.098 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:32:59 -0500 (0:00:00.056) 0:02:31.155 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:32:59 -0500 (0:00:00.039) 0:02:31.195 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:32:59 -0500 (0:00:00.041) 0:02:31.236 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not 
__podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:32:59 -0500 (0:00:00.042) 0:02:31.279 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:32:59 -0500 (0:00:00.039) 0:02:31.318 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:32:59 -0500 (0:00:00.084) 0:02:31.403 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:32:59 -0500 (0:00:00.037) 0:02:31.441 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 18 January 2025 11:32:59 -0500 (0:00:00.061) 0:02:31.502 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 18 January 2025 11:32:59 -0500 (0:00:00.084) 0:02:31.587 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 18 January 2025 11:32:59 -0500 (0:00:00.064) 0:02:31.651 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 18 January 2025 11:32:59 -0500 (0:00:00.059) 0:02:31.711 ****** ok: [managed-node2] => { "ansible_facts": { 
"__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.yml", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 18 January 2025 11:32:59 -0500 (0:00:00.129) 0:02:31.840 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 18 January 2025 11:32:59 -0500 (0:00:00.070) 0:02:31.911 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 18 January 2025 11:32:59 -0500 (0:00:00.139) 0:02:32.051 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 18 January 2025 11:32:59 -0500 (0:00:00.058) 0:02:32.109 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_service_name | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 18 January 2025 11:33:00 -0500 (0:00:00.062) 0:02:32.171 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1737217890.18163, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "998dccde0483b1654327a46ddd89cbaa47650370", "ctime": 1737217887.6326263, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 381681891, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1737217887.3596258, "nlink": 1, "path": "/etc/containers/systemd/quadlet-demo.yml", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1605, "uid": 0, "version": "3102570948", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 18 January 2025 11:33:00 -0500 (0:00:00.455) 0:02:32.626 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 18 January 2025 11:33:00 -0500 (0:00:00.100) 0:02:32.727 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 18 January 2025 11:33:01 -0500 (0:00:00.434) 0:02:33.161 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 18 January 2025 11:33:01 -0500 (0:00:00.126) 0:02:33.287 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 18 January 2025 11:33:01 -0500 (0:00:00.068) 0:02:33.356 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 18 January 2025 11:33:01 -0500 (0:00:00.060) 0:02:33.416 ****** changed: [managed-node2] => { "changed": true, "path": "/etc/containers/systemd/quadlet-demo.yml", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 18 January 2025 11:33:01 -0500 (0:00:00.457) 0:02:33.874 ****** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 18 January 2025 11:33:02 -0500 (0:00:00.806) 0:02:34.681 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 18 January 2025 11:33:02 -0500 (0:00:00.048) 0:02:34.729 ****** changed: [managed-node2] => (item=None) => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node2] => (item=None) => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node2] => (item=None) => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } 
changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 18 January 2025 11:33:03 -0500 (0:00:01.353) 0:02:36.083 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 18 January 2025 11:33:03 -0500 (0:00:00.034) 0:02:36.118 ****** changed: [managed-node2] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.032117", "end": "2025-01-18 11:33:04.345166", "rc": 0, "start": "2025-01-18 11:33:04.313049" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 18 January 2025 11:33:04 -0500 (0:00:00.457) 0:02:36.575 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:33:04 -0500 (0:00:00.105) 0:02:36.681 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:33:04 -0500 (0:00:00.057) 0:02:36.738 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:33:04 -0500 (0:00:00.059) 0:02:36.797 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 18 January 2025 11:33:04 -0500 (0:00:00.061) 0:02:36.858 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.033132", "end": "2025-01-18 11:33:05.104602", "rc": 0, "start": "2025-01-18 11:33:05.071470" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 18 January 2025 11:33:05 -0500 (0:00:00.449) 0:02:37.308 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": 
"0:00:00.032151", "end": "2025-01-18 11:33:05.536473", "rc": 0, "start": "2025-01-18 11:33:05.504322" } STDOUT: local systemd-quadlet-demo-mysql TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 18 January 2025 11:33:05 -0500 (0:00:00.430) 0:02:37.738 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.030475", "end": "2025-01-18 11:33:05.968403", "rc": 0, "start": "2025-01-18 11:33:05.937928" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 18 January 2025 11:33:06 -0500 (0:00:00.434) 0:02:38.173 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.032487", "end": "2025-01-18 11:33:06.421179", "rc": 0, "start": "2025-01-18 11:33:06.388692" } STDOUT: podman podman-default-kube-network systemd-quadlet-demo TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 18 January 2025 11:33:06 -0500 (0:00:00.552) 0:02:38.726 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 18 January 2025 11:33:07 -0500 (0:00:00.468) 0:02:39.194 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 18 January 2025 11:33:07 -0500 (0:00:00.427) 0:02:39.621 ****** ok: [managed-node2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { 
"name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", 
"state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": 
"stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": 
"nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-demo-mysql-volume.service": { "name": "quadlet-demo-mysql-volume.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-demo-mysql.service": { "name": "quadlet-demo-mysql.service", "source": "systemd", "state": "stopped", "status": "failed" }, "quadlet-demo-network.service": { "name": "quadlet-demo-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-demo.service": { "name": "quadlet-demo.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": 
"rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, 
"syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", 
"source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": 
"systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", 
"state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@3001.service": { "name": "user-runtime-dir@3001.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@3001.service": { "name": "user@3001.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 18 January 2025 11:33:09 -0500 (0:00:02.049) 0:02:41.671 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Saturday 18 January 2025 11:33:09 -0500 (0:00:00.033) 0:02:41.705 ******
ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "envoy-proxy-configmap.yml", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "---\napiVersion: v1\nkind: ConfigMap\nmetadata:\n name: envoy-proxy-config\ndata:\n envoy.yaml: |\n admin:\n address:\n socket_address:\n address: 0.0.0.0\n port_value: 9901\n\n static_resources:\n listeners:\n - name: listener_0\n address:\n socket_address:\n address: 0.0.0.0\n port_value: 8080\n filter_chains:\n - filters:\n - name: envoy.filters.network.http_connection_manager\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager\n stat_prefix: ingress_http\n codec_type: AUTO\n route_config:\n name: local_route\n virtual_hosts:\n - name: local_service\n domains: [\"*\"]\n routes:\n - match:\n prefix: \"/\"\n route:\n cluster: backend\n http_filters:\n - name: envoy.filters.http.router\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.filters.http.router.v3.Router\n transport_socket:\n name: envoy.transport_sockets.tls\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.transport_sockets.tls.v3.DownstreamTlsContext\n common_tls_context:\n tls_certificates:\n - certificate_chain:\n filename: /etc/envoy-certificates/certificate.pem\n private_key:\n filename: /etc/envoy-certificates/certificate.key\n clusters:\n - name: backend\n connect_timeout: 5s\n type: STATIC\n dns_refresh_rate: 1800s\n lb_policy: ROUND_ROBIN\n load_assignment:\n cluster_name: backend\n endpoints:\n - lb_endpoints:\n - endpoint:\n address:\n socket_address:\n address: 127.0.0.1\n port_value: 80", "__podman_quadlet_template_src": "" }, "changed": false }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Saturday 18 January 2025 11:33:09 -0500 (0:00:00.046) 0:02:41.752 ******
ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false }

TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Saturday 18 January 2025 11:33:09 -0500 (0:00:00.046) 0:02:41.798 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Saturday 18 January 2025 11:33:09 -0500 (0:00:00.047) 0:02:41.846 ******
ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "envoy-proxy-configmap", "__podman_quadlet_type": "yml", "__podman_rootless": false }, "changed": false }

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Saturday 18 January 2025 11:33:09 -0500 (0:00:00.070) 0:02:41.917 ******
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2
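A note on the envoy-proxy-configmap.yml payload captured in __podman_quadlet_str above: a plain .yml file under /etc/containers/systemd does not itself generate a systemd unit. Quadlet only processes .container/.kube/.network/.volume files, so a ConfigMap YAML like this is normally consumed by a .kube unit in the same directory. A minimal sketch of such a companion unit, written as the Ansible task that would install it; the quadlet-demo.kube file name and the quadlet-demo.yml Kube YAML are assumptions for illustration, not taken from this run:

- name: Illustrative only - a .kube unit that consumes the ConfigMap above
  ansible.builtin.copy:
    dest: /etc/containers/systemd/quadlet-demo.kube
    mode: "0644"
    content: |
      [Kube]
      # Kubernetes YAML to play; assumed name, not from this log
      Yaml=quadlet-demo.yml
      # Pass the ConfigMap file installed above to podman kube play
      ConfigMap=envoy-proxy-configmap.yml

      [Install]
      WantedBy=default.target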
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 18 January 2025 11:33:09 -0500 (0:00:00.073) 0:02:41.990 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 18 January 2025 11:33:09 -0500 (0:00:00.068) 0:02:42.059 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 18 January 2025 11:33:09 -0500 (0:00:00.038) 0:02:42.097 ******
ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false }

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Saturday 18 January 2025 11:33:10 -0500 (0:00:00.046) 0:02:42.144 ******
ok: [managed-node2] => { "changed": false, "stat": { "atime": 1737217697.4254181, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217689.5844104, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } }

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Saturday 18 January 2025 11:33:10 -0500 (0:00:00.393) 0:02:42.538 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Saturday 18 January 2025 11:33:10 -0500 (0:00:00.035) 0:02:42.573 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" }
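The two getsubids checks above are skipped because __podman_user is root; for a rootless user the role verifies that subordinate UID/GID ranges exist before it generates user units. A hedged sketch of the same check done by hand; the user name alice is hypothetical:

- name: Verify subordinate UID range for a rootless user (illustrative)
  ansible.builtin.command: getsubids alice
  changed_when: false

- name: Verify subordinate GID range as well
  ansible.builtin.command: getsubids -g alice
  changed_when: false

On success getsubids prints a line like "0: alice 100000 65536"; if no range is configured it should exit non-zero, which would fail the play much as the role's own tasks do.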
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Saturday 18 January 2025 11:33:10 -0500 (0:00:00.094) 0:02:42.668 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Saturday 18 January 2025 11:33:10 -0500 (0:00:00.052) 0:02:42.721 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Saturday 18 January 2025 11:33:10 -0500 (0:00:00.063) 0:02:42.784 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Saturday 18 January 2025 11:33:10 -0500 (0:00:00.063) 0:02:42.847 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Saturday 18 January 2025 11:33:10 -0500 (0:00:00.061) 0:02:42.909 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Saturday 18 January 2025 11:33:10 -0500 (0:00:00.063) 0:02:42.972 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Saturday 18 January 2025 11:33:10 -0500 (0:00:00.062) 0:02:43.034 ******
ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Saturday 18 January 2025 11:33:11 -0500 (0:00:00.123) 0:02:43.157 ******
ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false }
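__podman_quadlet_path lands on /etc/containers/systemd here because this spec runs as root; for a rootless user the role places quadlet files under that user's home directory instead. A rough sketch of the selection logic; the variable names below are illustrative, not the role's internal facts:

- name: Pick a quadlet directory by user (illustrative)
  ansible.builtin.set_fact:
    my_quadlet_path: >-
      {{ '/etc/containers/systemd'
         if my_podman_user == 'root'
         else my_user_home ~ '/.config/containers/systemd' }}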
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Saturday 18 January 2025 11:33:11 -0500 (0:00:00.064) 0:02:43.222 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Saturday 18 January 2025 11:33:11 -0500 (0:00:00.060) 0:02:43.283 ******
ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/envoy-proxy-configmap.yml", "__podman_volumes": [] }, "changed": false }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Saturday 18 January 2025 11:33:11 -0500 (0:00:00.123) 0:02:43.406 ******
ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Saturday 18 January 2025 11:33:11 -0500 (0:00:00.069) 0:02:43.476 ******
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node2

TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4
Saturday 18 January 2025 11:33:11 -0500 (0:00:00.132) 0:02:43.609 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Stop and disable service] *************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
Saturday 18 January 2025 11:33:11 -0500 (0:00:00.054) 0:02:43.663 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_service_name | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : See if quadlet file exists] ***********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33
Saturday 18 January 2025 11:33:11 -0500 (0:00:00.060) 0:02:43.724 ******
ok: [managed-node2] => { "changed": false, "stat": { "atime": 1737217912.7966642, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "d681c7d56f912150d041873e880818b22a90c188", "ctime": 1737217883.7486212, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 343933165, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1737217883.4696207, "nlink": 1, "path": "/etc/containers/systemd/envoy-proxy-configmap.yml", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 2102, "uid": 0, "version": "4109422841", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } }
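Before deleting anything the role stats the file, then (in the tasks that follow) slurps and parses it so it knows which images, volumes, and networks the quadlet referenced. The slurp-then-parse pattern reduced to its essentials; the path and variable names are assumptions, and the role's real tasks run with no_log, which is why their output is censored below:

- name: Read the quadlet file from the managed host (illustrative)
  ansible.builtin.slurp:
    path: /etc/containers/systemd/envoy-proxy-configmap.yml
  register: quadlet_raw

- name: Decode the base64 payload slurp returns and parse it as YAML
  ansible.builtin.set_fact:
    quadlet_parsed: "{{ quadlet_raw.content | b64decode | from_yaml }}"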
true, "rusr": true, "size": 2102, "uid": 0, "version": "4109422841", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 18 January 2025 11:33:12 -0500 (0:00:00.431) 0:02:44.155 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 18 January 2025 11:33:12 -0500 (0:00:00.158) 0:02:44.314 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 18 January 2025 11:33:12 -0500 (0:00:00.403) 0:02:44.717 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 18 January 2025 11:33:12 -0500 (0:00:00.057) 0:02:44.775 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 18 January 2025 11:33:12 -0500 (0:00:00.048) 0:02:44.824 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 18 January 2025 11:33:12 -0500 (0:00:00.040) 0:02:44.864 ****** changed: [managed-node2] => { "changed": true, "path": "/etc/containers/systemd/envoy-proxy-configmap.yml", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 18 January 2025 11:33:13 -0500 (0:00:00.422) 0:02:45.287 ****** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 18 January 2025 11:33:13 -0500 (0:00:00.828) 0:02:46.115 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: 
TASK [fedora.linux_system_roles.podman : Remove volumes] ***********************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99
Saturday 18 January 2025 11:33:14 -0500 (0:00:00.056) 0:02:46.171 ******
skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] *********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116
Saturday 18 January 2025 11:33:14 -0500 (0:00:00.116) 0:02:46.288 ******
ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false }

TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120
Saturday 18 January 2025 11:33:14 -0500 (0:00:00.057) 0:02:46.345 ******
changed: [managed-node2] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.036026", "end": "2025-01-18 11:33:14.591959", "rc": 0, "start": "2025-01-18 11:33:14.555933" }

TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131
Saturday 18 January 2025 11:33:14 -0500 (0:00:00.454) 0:02:46.800 ******
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2

TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 18 January 2025 11:33:14 -0500 (0:00:00.073) 0:02:46.873 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 18 January 2025 11:33:14 -0500 (0:00:00.033) 0:02:46.906 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 18 January 2025 11:33:14 -0500 (0:00:00.033) 0:02:46.940 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : For testing and debugging - images] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141
Saturday 18 January 2025 11:33:14 -0500 (0:00:00.079) 0:02:47.020 ******
ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.032856", "end": "2025-01-18 11:33:15.247075", "rc": 0, "start": "2025-01-18 11:33:15.214219" }
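The prune step above shells out to podman directly, as the recorded cmd list shows. An equivalent standalone task; the register/changed_when handling here is an assumption for idempotent reporting, not what the role does:

- name: Prune all unused images (same command the role runs)
  ansible.builtin.command:
    argv: [podman, image, prune, --all, -f]
  register: prune_result
  changed_when: prune_result.stdout | length > 0

The linger tasks are skipped because this spec runs as root; for rootless users the role keeps the user's systemd manager alive (loginctl enable-linger) so user units can run unattended, and marks the user for a possible linger cancel once cleanup finishes.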
TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150
Saturday 18 January 2025 11:33:15 -0500 (0:00:00.428) 0:02:47.448 ******
ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.031825", "end": "2025-01-18 11:33:15.683400", "rc": 0, "start": "2025-01-18 11:33:15.651575" }
STDOUT: local systemd-quadlet-demo-mysql

TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159
Saturday 18 January 2025 11:33:15 -0500 (0:00:00.443) 0:02:47.892 ******
ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.031579", "end": "2025-01-18 11:33:16.132914", "rc": 0, "start": "2025-01-18 11:33:16.101335" }

TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168
Saturday 18 January 2025 11:33:16 -0500 (0:00:00.473) 0:02:48.366 ******
ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.031271", "end": "2025-01-18 11:33:16.614343", "rc": 0, "start": "2025-01-18 11:33:16.583072" }
STDOUT: podman podman-default-kube-network systemd-quadlet-demo

TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177
Saturday 18 January 2025 11:33:16 -0500 (0:00:00.450) 0:02:48.816 ******
ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] *****
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187
Saturday 18 January 2025 11:33:17 -0500 (0:00:00.423) 0:02:49.240 ******
ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }
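The final debugging step gathers service facts again, producing the dump that follows. The same data is available to any playbook via the service_facts module, which is also a convenient way to assert that cleanup actually took effect; the assertion below is an illustrative example, not part of this test:

- name: Gather unit states (this is what produces the dump below)
  ansible.builtin.service_facts:

- name: Example check against the collected facts
  ansible.builtin.assert:
    that:
      - ansible_facts.services['quadlet-demo-mysql.service'].state != 'running'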
"not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": 
"systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": 
"initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", 
"status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-demo-mysql-volume.service": { "name": "quadlet-demo-mysql-volume.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-demo-mysql.service": { "name": "quadlet-demo-mysql.service", "source": "systemd", "state": "stopped", "status": "failed" }, "quadlet-demo-network.service": { "name": "quadlet-demo-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-demo.service": { "name": "quadlet-demo.service", "source": "systemd", "state": "stopped", "status": "failed" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": 
"enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": 
"systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": 
"systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": 
"inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": 
"inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@3001.service": { "name": "user-runtime-dir@3001.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@3001.service": { "name": "user@3001.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 18 January 2025 11:33:19 -0500 
(0:00:02.029) 0:02:51.702 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 18 January 2025 11:33:19 -0500 (0:00:00.033) 0:02:51.736 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Container]\nImage=quay.io/linux-system-roles/mysql:5.6\nContainerName=quadlet-demo-mysql\nVolume=quadlet-demo-mysql.volume:/var/lib/mysql\nVolume=/tmp/quadlet_demo:/var/lib/quadlet_demo:Z\nNetwork=quadlet-demo.network\nSecret=mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD\nHealthCmd=/bin/true\nHealthOnFailure=kill\n", "__podman_quadlet_template_src": "quadlet-demo-mysql.container.j2" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 18 January 2025 11:33:19 -0500 (0:00:00.097) 0:02:51.834 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 18 January 2025 11:33:19 -0500 (0:00:00.055) 0:02:51.889 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_str", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 18 January 2025 11:33:19 -0500 (0:00:00.059) 0:02:51.948 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo-mysql", "__podman_quadlet_type": "container", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 18 January 2025 11:33:19 -0500 (0:00:00.054) 0:02:52.003 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:33:19 -0500 (0:00:00.077) 0:02:52.080 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 
January 2025 11:33:19 -0500 (0:00:00.041) 0:02:52.122 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:33:20 -0500 (0:00:00.088) 0:02:52.210 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:33:20 -0500 (0:00:00.047) 0:02:52.257 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1737217697.4254181, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217689.5844104, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:33:20 -0500 (0:00:00.430) 0:02:52.688 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:33:20 -0500 (0:00:00.063) 0:02:52.751 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:33:20 -0500 (0:00:00.060) 0:02:52.812 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:33:20 -0500 (0:00:00.062) 0:02:52.874 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] 
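
All of the subuid/subgid checks in this stretch are skipped because __podman_user is root: rootful podman needs no UID remapping. For a rootless user the role would query the ranges with getsubids (stat'd above at /usr/bin/getsubids), falling back to parsing /etc/subuid and /etc/subgid when the tool is absent. A hedged sketch of the equivalent manual checks, using a hypothetical user "poduser":

    getsubids poduser        # subordinate UID ranges, e.g. "0: poduser 100000 65536"
    getsubids -g poduser     # subordinate GID ranges
    grep '^poduser:' /etc/subuid /etc/subgid   # fallback when getsubids is missing
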
********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:33:20 -0500 (0:00:00.059) 0:02:52.934 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:33:20 -0500 (0:00:00.062) 0:02:52.997 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:33:20 -0500 (0:00:00.060) 0:02:53.057 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:33:20 -0500 (0:00:00.063) 0:02:53.120 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 18 January 2025 11:33:21 -0500 (0:00:00.060) 0:02:53.181 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/linux-system-roles/mysql:5.6" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-demo-mysql.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 18 January 2025 11:33:21 -0500 (0:00:00.098) 0:02:53.279 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 18 January 2025 11:33:21 -0500 (0:00:00.049) 0:02:53.329 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 18 January 2025 11:33:21 -0500 (0:00:00.036) 0:02:53.365 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [ "quay.io/linux-system-roles/mysql:5.6" ], 
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.container", "__podman_volumes": [ "/tmp/quadlet_demo" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 18 January 2025 11:33:21 -0500 (0:00:00.093) 0:02:53.458 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 18 January 2025 11:33:21 -0500 (0:00:00.048) 0:02:53.506 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 18 January 2025 11:33:21 -0500 (0:00:00.140) 0:02:53.647 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 18 January 2025 11:33:21 -0500 (0:00:00.045) 0:02:53.692 ****** changed: [managed-node2] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-demo-mysql.service", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2025-01-18 11:31:52 EST", "ActiveEnterTimestampMonotonic": "505862514", "ActiveExitTimestamp": "Sat 2025-01-18 11:31:52 EST", "ActiveExitTimestampMonotonic": "506156518", "ActiveState": "failed", "After": "tmp.mount network-online.target quadlet-demo-mysql-volume.service basic.target sysinit.target -.mount quadlet-demo-network.service systemd-journald.socket system.slice", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-01-18 11:31:52 EST", "AssertTimestampMonotonic": "505587067", "Before": "shutdown.target multi-user.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "274356000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin 
cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-01-18 11:31:52 EST", "ConditionTimestampMonotonic": "505587063", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "8265", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "yes", "DelegateControllers": "cpu cpuset io memory pids", "Description": "quadlet-demo-mysql.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698241536", "EffectiveMemoryMax": "3698241536", "EffectiveTasksMax": "22365", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service", "ExecMainCode": "1", "ExecMainExitTimestamp": "Sat 2025-01-18 11:31:52 EST", "ExecMainExitTimestampMonotonic": "506154296", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "35503", "ExecMainStartTimestamp": "Sat 2025-01-18 11:31:52 EST", "ExecMainStartTimestampMonotonic": "505800001", "ExecMainStatus": "1", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-demo-mysql --cidfile=/run/quadlet-demo-mysql.cid --replace --rm --cgroups=split --network systemd-quadlet-demo --sdnotify=conmon -d -v systemd-quadlet-demo-mysql:/var/lib/mysql -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z --secret mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD --health-cmd /bin/true --health-on-failure kill quay.io/linux-system-roles/mysql:5.6 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-demo-mysql --cidfile=/run/quadlet-demo-mysql.cid --replace --rm --cgroups=split --network systemd-quadlet-demo --sdnotify=conmon -d -v systemd-quadlet-demo-mysql:/var/lib/mysql -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z --secret mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD --health-cmd /bin/true --health-on-failure kill quay.io/linux-system-roles/mysql:5.6 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; flags=ignore-failure ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", 
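
The ExecStart/ExecStop entries above show what the systemd quadlet generator made of the [Container] unit this role templated earlier (__podman_quadlet_str in "Set per-container variables part 0"): Image= became the image argument, ContainerName= became --name, each Volume= a -v mount, Network=quadlet-demo.network became --network systemd-quadlet-demo, Secret= became --secret, and HealthCmd=/HealthOnFailure= became --health-cmd/--health-on-failure. A minimal sketch that reproduces that unit by hand (content copied from this log; requires root):

    cat > /etc/containers/systemd/quadlet-demo-mysql.container <<'EOF'
    [Install]
    WantedBy=default.target

    [Container]
    Image=quay.io/linux-system-roles/mysql:5.6
    ContainerName=quadlet-demo-mysql
    Volume=quadlet-demo-mysql.volume:/var/lib/mysql
    Volume=/tmp/quadlet_demo:/var/lib/quadlet_demo:Z
    Network=quadlet-demo.network
    Secret=mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD
    HealthCmd=/bin/true
    HealthOnFailure=kill
    EOF
    systemctl daemon-reload   # quadlet generator renders it to
                              # /run/systemd/generator/quadlet-demo-mysql.service
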
"FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo-mysql.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo-mysql.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Sat 2025-01-18 11:31:53 EST", "InactiveEnterTimestampMonotonic": "506211854", "InactiveExitTimestamp": "Sat 2025-01-18 11:31:52 EST", "InactiveExitTimestampMonotonic": "505589092", "InvocationID": "904b15c8df2a4231a2d04f0d21c5360f", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3244511232", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "35901440", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo-mysql.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "continue", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": 
"no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "-.mount system.slice quadlet-demo-mysql-volume.service quadlet-demo-network.service sysinit.target", "RequiresMountsFor": "/run/containers /tmp/quadlet_demo", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "exit-code", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo-mysql.container", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2025-01-18 11:31:53 EST", "StateChangeTimestampMonotonic": "506211854", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "failed", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo-mysql", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 18 January 2025 11:33:22 -0500 (0:00:00.778) 0:02:54.471 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1737217879.048615, "attr_flags": 
"", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "ca62b2ad3cc9afb5b5371ebbf797b9bc4fd7edd4", "ctime": 1737217879.050615, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 218104040, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1737217878.7736146, "nlink": 1, "path": "/etc/containers/systemd/quadlet-demo-mysql.container", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 363, "uid": 0, "version": "1511981016", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 18 January 2025 11:33:22 -0500 (0:00:00.423) 0:02:54.894 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 18 January 2025 11:33:22 -0500 (0:00:00.107) 0:02:55.002 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 18 January 2025 11:33:23 -0500 (0:00:00.410) 0:02:55.413 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 18 January 2025 11:33:23 -0500 (0:00:00.091) 0:02:55.504 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 18 January 2025 11:33:23 -0500 (0:00:00.063) 0:02:55.567 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 18 January 2025 11:33:23 -0500 (0:00:00.077) 0:02:55.645 ****** changed: [managed-node2] => { "changed": true, "path": "/etc/containers/systemd/quadlet-demo-mysql.container", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 18 January 2025 11:33:23 -0500 
(0:00:00.431) 0:02:56.076 ****** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 18 January 2025 11:33:24 -0500 (0:00:00.773) 0:02:56.850 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 18 January 2025 11:33:25 -0500 (0:00:00.533) 0:02:57.384 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 18 January 2025 11:33:25 -0500 (0:00:00.091) 0:02:57.475 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 18 January 2025 11:33:25 -0500 (0:00:00.097) 0:02:57.573 ****** changed: [managed-node2] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.032620", "end": "2025-01-18 11:33:25.851624", "rc": 0, "start": "2025-01-18 11:33:25.819004" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 18 January 2025 11:33:25 -0500 (0:00:00.502) 0:02:58.075 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:33:26 -0500 (0:00:00.181) 0:02:58.257 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:33:26 -0500 (0:00:00.060) 0:02:58.318 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:33:26 -0500 (0:00:00.062) 0:02:58.381 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 18 January 2025 11:33:26 -0500 (0:00:00.056) 0:02:58.438 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.033078", "end": "2025-01-18 11:33:26.682215", "rc": 0, "start": "2025-01-18 11:33:26.649137" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 18 January 2025 11:33:26 -0500 (0:00:00.458) 0:02:58.896 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.034722", "end": "2025-01-18 11:33:27.173533", "rc": 0, "start": "2025-01-18 11:33:27.138811" } STDOUT: local systemd-quadlet-demo-mysql TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 18 January 2025 11:33:27 -0500 (0:00:00.496) 0:02:59.392 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.030285", "end": "2025-01-18 11:33:27.644127", "rc": 0, "start": "2025-01-18 11:33:27.613842" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 18 January 2025 11:33:27 -0500 (0:00:00.473) 0:02:59.866 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.031889", "end": "2025-01-18 11:33:28.157005", "rc": 0, "start": "2025-01-18 11:33:28.125116" } STDOUT: podman podman-default-kube-network systemd-quadlet-demo TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 18 January 2025 11:33:28 -0500 (0:00:00.518) 0:03:00.384 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 18 January 2025 11:33:28 -0500 (0:00:00.481) 0:03:00.866 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 18 January 2025 11:33:29 -0500 (0:00:00.526) 0:03:01.392 ****** ok: [managed-node2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { 
"name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", 
"status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, 
"hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, 
"netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-demo-mysql-volume.service": { "name": "quadlet-demo-mysql-volume.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-demo-mysql.service": { "name": "quadlet-demo-mysql.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "quadlet-demo-network.service": { "name": "quadlet-demo-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-demo.service": { "name": "quadlet-demo.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": 
"systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": 
"sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", 
"status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, 
"systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": 
"systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@3001.service": { "name": "user-runtime-dir@3001.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, 
"user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@3001.service": { "name": "user@3001.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 18 January 2025 11:33:31 -0500 (0:00:02.086) 0:03:03.479 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 18 January 2025 11:33:31 -0500 (0:00:00.039) 0:03:03.519 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "quadlet-demo-mysql.volume", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Volume]", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 18 January 2025 11:33:31 -0500 (0:00:00.074) 0:03:03.593 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 18 January 2025 11:33:31 -0500 (0:00:00.069) 0:03:03.663 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 18 January 2025 11:33:31 -0500 (0:00:00.057) 0:03:03.721 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo-mysql", "__podman_quadlet_type": "volume", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 18 January 2025 11:33:31 -0500 (0:00:00.156) 0:03:03.878 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:33:31 -0500 (0:00:00.110) 0:03:03.988 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional 
result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:33:31 -0500 (0:00:00.068) 0:03:04.057 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:33:31 -0500 (0:00:00.067) 0:03:04.125 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:33:32 -0500 (0:00:00.078) 0:03:04.203 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1737217697.4254181, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217689.5844104, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:33:32 -0500 (0:00:00.532) 0:03:04.735 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:33:32 -0500 (0:00:00.105) 0:03:04.840 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:33:32 -0500 (0:00:00.111) 0:03:04.952 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:33:32 -0500 (0:00:00.101) 
0:03:05.053 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:33:33 -0500 (0:00:00.107) 0:03:05.160 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:33:33 -0500 (0:00:00.071) 0:03:05.232 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:33:33 -0500 (0:00:00.067) 0:03:05.300 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:33:33 -0500 (0:00:00.076) 0:03:05.377 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 18 January 2025 11:33:33 -0500 (0:00:00.061) 0:03:05.438 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-demo-mysql-volume.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 18 January 2025 11:33:33 -0500 (0:00:00.097) 0:03:05.536 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 18 January 2025 11:33:33 -0500 (0:00:00.062) 0:03:05.598 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 18 January 2025 11:33:33 -0500 (0:00:00.197) 0:03:05.796 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.volume", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 18 January 2025 11:33:33 -0500 (0:00:00.208) 0:03:06.004 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 18 January 2025 11:33:33 -0500 (0:00:00.073) 0:03:06.078 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 18 January 2025 11:33:34 -0500 (0:00:00.241) 0:03:06.320 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 18 January 2025 11:33:34 -0500 (0:00:00.106) 0:03:06.427 ****** changed: [managed-node2] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-demo-mysql-volume.service", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2025-01-18 11:31:05 EST", "ActiveEnterTimestampMonotonic": "458525556", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "sysinit.target basic.target network-online.target systemd-journald.socket system.slice -.mount", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-01-18 11:31:05 EST", "AssertTimestampMonotonic": "458477631", "Before": "shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "36498000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease 
cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-01-18 11:31:05 EST", "ConditionTimestampMonotonic": "458477627", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "6342", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-demo-mysql-volume.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698241536", "EffectiveMemoryMax": "3698241536", "EffectiveTasksMax": "22365", "ExecMainCode": "1", "ExecMainExitTimestamp": "Sat 2025-01-18 11:31:05 EST", "ExecMainExitTimestampMonotonic": "458525354", "ExecMainHandoffTimestamp": "Sat 2025-01-18 11:31:05 EST", "ExecMainHandoffTimestampMonotonic": "458488382", "ExecMainPID": "30776", "ExecMainStartTimestamp": "Sat 2025-01-18 11:31:05 EST", "ExecMainStartTimestampMonotonic": "458478441", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-demo-mysql ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-demo-mysql ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo-mysql-volume.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo-mysql-volume.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2025-01-18 11:31:05 EST", "InactiveExitTimestampMonotonic": "458478974", "InvocationID": "35b7e7bb03d4443ba8bee595854e03c5", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", 
"LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3228217344", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "14749696", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo-mysql-volume.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "sysinit.target system.slice -.mount", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo-mysql.volume", "StandardError": 
"inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2025-01-18 11:31:05 EST", "StateChangeTimestampMonotonic": "458525556", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "exited", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo-mysql-volume", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 18 January 2025 11:33:35 -0500 (0:00:00.932) 0:03:07.359 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1737217863.8295949, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "585f8cbdf0ec73000f9227dcffbef71e9552ea4a", "ctime": 1737217863.831595, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 608174304, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1737217863.5345945, "nlink": 1, "path": "/etc/containers/systemd/quadlet-demo-mysql.volume", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 9, "uid": 0, "version": "1324838616", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 18 January 2025 11:33:35 -0500 (0:00:00.497) 0:03:07.856 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 18 January 2025 11:33:35 -0500 (0:00:00.174) 0:03:08.032 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } 
TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 18 January 2025 11:33:36 -0500 (0:00:00.468) 0:03:08.501 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 18 January 2025 11:33:36 -0500 (0:00:00.099) 0:03:08.601 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 18 January 2025 11:33:36 -0500 (0:00:00.065) 0:03:08.666 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 18 January 2025 11:33:36 -0500 (0:00:00.061) 0:03:08.728 ****** changed: [managed-node2] => { "changed": true, "path": "/etc/containers/systemd/quadlet-demo-mysql.volume", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 18 January 2025 11:33:37 -0500 (0:00:00.473) 0:03:09.201 ****** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 18 January 2025 11:33:37 -0500 (0:00:00.852) 0:03:10.054 ****** changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 18 January 2025 11:33:38 -0500 (0:00:00.639) 0:03:10.694 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 18 January 2025 11:33:38 -0500 (0:00:00.076) 0:03:10.770 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 18 January 2025 11:33:38 -0500 (0:00:00.059) 0:03:10.829 ****** changed: [managed-node2] => { 
"changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.030066", "end": "2025-01-18 11:33:39.078957", "rc": 0, "start": "2025-01-18 11:33:39.048891" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 18 January 2025 11:33:39 -0500 (0:00:00.498) 0:03:11.328 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:33:39 -0500 (0:00:00.105) 0:03:11.434 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:33:39 -0500 (0:00:00.045) 0:03:11.479 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:33:39 -0500 (0:00:00.052) 0:03:11.532 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 18 January 2025 11:33:39 -0500 (0:00:00.045) 0:03:11.577 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.033393", "end": "2025-01-18 11:33:39.796606", "rc": 0, "start": "2025-01-18 11:33:39.763213" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 18 January 2025 11:33:39 -0500 (0:00:00.447) 0:03:12.025 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.032548", "end": "2025-01-18 11:33:40.275395", "rc": 0, "start": "2025-01-18 11:33:40.242847" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 18 January 2025 11:33:40 -0500 (0:00:00.480) 0:03:12.506 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.033108", "end": "2025-01-18 11:33:40.809373", "rc": 0, "start": "2025-01-18 11:33:40.776265" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 18 January 2025 11:33:40 -0500 (0:00:00.578) 
0:03:13.087 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.034203", "end": "2025-01-18 11:33:41.375849", "rc": 0, "start": "2025-01-18 11:33:41.341646" } STDOUT: podman podman-default-kube-network systemd-quadlet-demo TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 18 January 2025 11:33:41 -0500 (0:00:00.489) 0:03:13.577 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 18 January 2025 11:33:41 -0500 (0:00:00.497) 0:03:14.074 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 18 January 2025 11:33:42 -0500 (0:00:00.582) 0:03:14.657 ****** ok: [managed-node2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", 
"source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": 
"systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": 
"lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": 
"indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-demo-mysql.service": { "name": "quadlet-demo-mysql.service", "source": "systemd", "state": "stopped", "status": "failed" }, "quadlet-demo-network.service": { "name": "quadlet-demo-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-demo.service": { "name": "quadlet-demo.service", "source": "systemd", "state": "stopped", "status": "failed" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": 
"selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", 
"source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": 
"static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, 
"systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", 
"status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@3001.service": { "name": "user-runtime-dir@3001.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@3001.service": { "name": "user@3001.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 18 January 2025 11:33:44 -0500 (0:00:02.156) 0:03:16.814 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 18 January 2025 11:33:44 -0500 (0:00:00.084) 0:03:16.898 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "quadlet-demo.network", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Network]\nSubnet=192.168.30.0/24\nGateway=192.168.30.1\nLabel=app=wordpress", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 18 January 2025 11:33:44 -0500 (0:00:00.048) 0:03:16.947 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec 
is given] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 18 January 2025 11:33:44 -0500 (0:00:00.042) 0:03:16.990 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 18 January 2025 11:33:44 -0500 (0:00:00.041) 0:03:17.031 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo", "__podman_quadlet_type": "network", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 18 January 2025 11:33:44 -0500 (0:00:00.086) 0:03:17.117 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:33:45 -0500 (0:00:00.080) 0:03:17.198 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:33:45 -0500 (0:00:00.045) 0:03:17.243 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:33:45 -0500 (0:00:00.046) 0:03:17.290 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:33:45 -0500 (0:00:00.047) 0:03:17.338 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1737217697.4254181, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217689.5844104, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": 
"1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:33:45 -0500 (0:00:00.387) 0:03:17.725 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:33:45 -0500 (0:00:00.042) 0:03:17.768 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:33:45 -0500 (0:00:00.035) 0:03:17.803 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:33:45 -0500 (0:00:00.036) 0:03:17.840 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:33:45 -0500 (0:00:00.036) 0:03:17.876 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:33:45 -0500 (0:00:00.037) 0:03:17.913 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:33:45 -0500 (0:00:00.039) 0:03:17.952 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:33:45 -0500 (0:00:00.119) 0:03:18.072 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not 
__podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 18 January 2025 11:33:46 -0500 (0:00:00.063) 0:03:18.135 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-demo-network.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 18 January 2025 11:33:46 -0500 (0:00:00.096) 0:03:18.232 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 18 January 2025 11:33:46 -0500 (0:00:00.054) 0:03:18.286 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 18 January 2025 11:33:46 -0500 (0:00:00.037) 0:03:18.324 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.network", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 18 January 2025 11:33:46 -0500 (0:00:00.089) 0:03:18.413 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 18 January 2025 11:33:46 -0500 (0:00:00.045) 0:03:18.459 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 18 January 2025 11:33:46 -0500 (0:00:00.079) 0:03:18.538 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 18 January 2025 11:33:46 -0500 (0:00:00.032) 0:03:18.571 ****** changed: 
[managed-node2] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-demo-network.service", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2025-01-18 11:31:00 EST", "ActiveEnterTimestampMonotonic": "453948574", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "sysinit.target network-online.target system.slice -.mount basic.target systemd-journald.socket", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-01-18 11:31:00 EST", "AssertTimestampMonotonic": "453899810", "Before": "shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "38915000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-01-18 11:31:00 EST", "ConditionTimestampMonotonic": "453899806", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "6303", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-demo-network.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698241536", "EffectiveMemoryMax": "3698241536", "EffectiveTasksMax": "22365", "ExecMainCode": "1", "ExecMainExitTimestamp": "Sat 2025-01-18 11:31:00 EST", "ExecMainExitTimestampMonotonic": "453948387", "ExecMainHandoffTimestamp": "Sat 2025-01-18 11:31:00 EST", "ExecMainHandoffTimestampMonotonic": "453911547", "ExecMainPID": "29944", "ExecMainStartTimestamp": "Sat 2025-01-18 11:31:00 EST", "ExecMainStartTimestampMonotonic": "453900608", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.30.0/24 --gateway 192.168.30.1 --label app=wordpress systemd-quadlet-demo ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.30.0/24 --gateway 192.168.30.1 --label app=wordpress systemd-quadlet-demo ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": 
"root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo-network.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo-network.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2025-01-18 11:31:00 EST", "InactiveExitTimestampMonotonic": "453901151", "InvocationID": "252254a8383043fab10d020d5f8d556d", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3212886016", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "18575360", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo-network.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": 
"no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "system.slice -.mount sysinit.target", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo.network", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2025-01-18 11:31:00 EST", "StateChangeTimestampMonotonic": "453948574", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "exited", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo-network", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 18 
January 2025 11:33:47 -0500 (0:00:00.813) 0:03:19.385 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1737217859.276589, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "e57c08d49aff4bae8daab138d913aeddaa8682a0", "ctime": 1737217859.278589, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 562036949, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1737217858.8895884, "nlink": 1, "path": "/etc/containers/systemd/quadlet-demo.network", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 74, "uid": 0, "version": "3664251563", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 18 January 2025 11:33:47 -0500 (0:00:00.403) 0:03:19.788 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 18 January 2025 11:33:47 -0500 (0:00:00.086) 0:03:19.875 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 18 January 2025 11:33:48 -0500 (0:00:00.404) 0:03:20.280 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 18 January 2025 11:33:48 -0500 (0:00:00.051) 0:03:20.331 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 18 January 2025 11:33:48 -0500 (0:00:00.085) 0:03:20.416 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 18 January 2025 11:33:48 -0500 (0:00:00.036) 0:03:20.453 ****** changed: [managed-node2] => { "changed": true, "path": "/etc/containers/systemd/quadlet-demo.network", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 18 January 2025 11:33:48 -0500 (0:00:00.422) 0:03:20.875 ****** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 18 January 2025 11:33:49 -0500 (0:00:00.746) 0:03:21.621 ****** changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 18 January 2025 11:33:49 -0500 (0:00:00.428) 0:03:22.050 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 18 January 2025 11:33:49 -0500 (0:00:00.047) 0:03:22.098 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 18 January 2025 11:33:50 -0500 (0:00:00.035) 0:03:22.133 ****** changed: [managed-node2] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.030580", "end": "2025-01-18 11:33:50.356449", "rc": 0, "start": "2025-01-18 11:33:50.325869" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 18 January 2025 11:33:50 -0500 (0:00:00.422) 0:03:22.555 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:33:50 -0500 (0:00:00.062) 0:03:22.618 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:33:50 -0500 (0:00:00.033) 0:03:22.652 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:33:50 -0500 (0:00:00.038) 0:03:22.690 ****** skipping: 
[managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 18 January 2025 11:33:50 -0500 (0:00:00.055) 0:03:22.745 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.031554", "end": "2025-01-18 11:33:50.977874", "rc": 0, "start": "2025-01-18 11:33:50.946320" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 18 January 2025 11:33:51 -0500 (0:00:00.451) 0:03:23.197 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.030261", "end": "2025-01-18 11:33:51.430029", "rc": 0, "start": "2025-01-18 11:33:51.399768" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 18 January 2025 11:33:51 -0500 (0:00:00.436) 0:03:23.634 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.029435", "end": "2025-01-18 11:33:51.871811", "rc": 0, "start": "2025-01-18 11:33:51.842376" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 18 January 2025 11:33:51 -0500 (0:00:00.462) 0:03:24.096 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.032365", "end": "2025-01-18 11:33:52.339904", "rc": 0, "start": "2025-01-18 11:33:52.307539" } STDOUT: podman podman-default-kube-network TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 18 January 2025 11:33:52 -0500 (0:00:00.444) 0:03:24.540 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 18 January 2025 11:33:52 -0500 (0:00:00.461) 0:03:25.002 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 18 January 2025 11:33:53 -0500 (0:00:00.441) 0:03:25.443 ****** ok: [managed-node2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": 
"systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, 
"dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", 
"source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": 
"inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-demo-mysql.service": { "name": "quadlet-demo-mysql.service", "source": "systemd", "state": "stopped", "status": "failed" }, "quadlet-demo.service": { "name": "quadlet-demo.service", "source": "systemd", "state": "stopped", "status": "failed" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", 
"status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", 
"state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, 
"systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": 
"systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": 
"systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@3001.service": { "name": "user-runtime-dir@3001.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@3001.service": { "name": "user@3001.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": 
"systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 18 January 2025 11:33:55 -0500 (0:00:01.996) 0:03:27.439 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196 Saturday 18 January 2025 11:33:55 -0500 (0:00:00.065) 0:03:27.504 ****** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202 Saturday 18 January 2025 11:33:55 -0500 (0:00:00.051) 0:03:27.556 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211 Saturday 18 January 2025 11:33:55 -0500 (0:00:00.061) 0:03:27.617 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Ensure no resources] ***************************************************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:188 Saturday 18 January 2025 11:33:55 -0500 (0:00:00.115) 0:03:27.733 ****** fatal: [managed-node2]: FAILED! 
=> { "assertion": "ansible_facts[\"services\"] | dict2items | rejectattr(\"value.status\", \"match\", \"not-found\") | selectattr(\"key\", \"match\", \"quadlet-demo\") | list | length == 0", "changed": false, "evaluated_to": false } MSG: Assertion failed TASK [Debug] ******************************************************************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:199 Saturday 18 January 2025 11:33:55 -0500 (0:00:00.087) 0:03:27.820 ****** ok: [managed-node2] => { "changed": false, "cmd": "exec 1>&2\nset -x\nset -o pipefail\nsystemctl list-units --plain -l --all | grep quadlet || :\nsystemctl list-unit-files --all | grep quadlet || :\nsystemctl list-units --plain --failed -l --all | grep quadlet || :\n", "delta": "0:00:00.368457", "end": "2025-01-18 11:33:56.394102", "rc": 0, "start": "2025-01-18 11:33:56.025645" } STDERR: + set -o pipefail + systemctl list-units --plain -l --all + grep quadlet quadlet-demo-mysql.service not-found failed failed quadlet-demo-mysql.service quadlet-demo.service not-found failed failed quadlet-demo.service + systemctl list-unit-files --all + grep quadlet + : + grep quadlet + systemctl list-units --plain --failed -l --all quadlet-demo-mysql.service not-found failed failed quadlet-demo-mysql.service quadlet-demo.service not-found failed failed quadlet-demo.service TASK [Get journald] ************************************************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:209 Saturday 18 January 2025 11:33:56 -0500 (0:00:00.772) 0:03:28.593 ****** fatal: [managed-node2]: FAILED! => { "changed": false, "cmd": [ "journalctl", "-ex" ], "delta": "0:00:00.030708", "end": "2025-01-18 11:33:56.792809", "failed_when_result": true, "rc": 0, "start": "2025-01-18 11:33:56.762101" } STDOUT: Jan 18 11:27:42 managed-node2 python3.12[7452]: ansible-file Invoked with path=/tmp/lsr_8do_wnm9_podman/httpd3 state=directory mode=0755 owner=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:27:42 managed-node2 python3.12[7577]: ansible-ansible.legacy.stat Invoked with path=/tmp/lsr_8do_wnm9_podman/httpd1/index.txt follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 18 11:27:43 managed-node2 python3.12[7677]: ansible-ansible.legacy.copy Invoked with dest=/tmp/lsr_8do_wnm9_podman/httpd1/index.txt mode=0644 owner=podman_basic_user src=/root/.ansible/tmp/ansible-tmp-1737217662.4112873-7120-117226880983605/.source.txt _original_basename=.nthgqsa8 follow=False checksum=40bd001563085fc35165329ea1ff5c5ecbdbbeef backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:27:43 managed-node2 python3.12[7802]: ansible-ansible.legacy.stat Invoked with path=/tmp/lsr_8do_wnm9_podman/httpd2/index.txt follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 18 11:27:43 managed-node2 python3.12[7902]: ansible-ansible.legacy.copy Invoked with dest=/tmp/lsr_8do_wnm9_podman/httpd2/index.txt mode=0644 owner=root 
src=/root/.ansible/tmp/ansible-tmp-1737217663.0892506-7120-154356650408310/.source.txt _original_basename=._uamuprj follow=False checksum=40bd001563085fc35165329ea1ff5c5ecbdbbeef backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:27:44 managed-node2 python3.12[8027]: ansible-ansible.legacy.stat Invoked with path=/tmp/lsr_8do_wnm9_podman/httpd3/index.txt follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 18 11:27:44 managed-node2 python3.12[8127]: ansible-ansible.legacy.copy Invoked with dest=/tmp/lsr_8do_wnm9_podman/httpd3/index.txt mode=0644 owner=root src=/root/.ansible/tmp/ansible-tmp-1737217663.7594595-7120-50422747832983/.source.txt _original_basename=.p9l_7l3o follow=False checksum=40bd001563085fc35165329ea1ff5c5ecbdbbeef backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:27:44 managed-node2 python3.12[8252]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:27:45 managed-node2 python3.12[8377]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:27:46 managed-node2 sudo[8627]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ivhxbbvaolvqfoqbmeodacnpiaiyljgs ; /usr/bin/python3.12 /root/.ansible/tmp/ansible-tmp-1737217666.44123-7231-197571347797449/AnsiballZ_dnf.py' Jan 18 11:27:46 managed-node2 sudo[8627]: pam_unix(sudo:session): session opened for user root(uid=0) by root(uid=0) Jan 18 11:27:47 managed-node2 python3.12[8630]: ansible-ansible.legacy.dnf Invoked with name=['iptables-nft', 'podman', 'shadow-utils-subid'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 18 11:28:01 managed-node2 kernel: SELinux: Converting 388 SID table entries... 
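The "Ensure no resources" assertion above fails because the services facts gathered at 11:33:53 still list quadlet-demo.service and quadlet-demo-mysql.service with status "failed": the rejectattr filter only discards entries whose status matches "not-found", so both units survive the chain and the resulting length is 2, not 0. A minimal standalone reproduction of that filter chain, with a hand-written services variable standing in for ansible_facts["services"] (unit names taken from this log; this sketch is not part of the test source), fails the same way:

- hosts: localhost
  connection: local
  gather_facts: false
  vars:
    services:
      quadlet-demo.service: {name: quadlet-demo.service, status: failed}
      quadlet-demo-mysql.service: {name: quadlet-demo-mysql.service, status: failed}
  tasks:
    # Fails exactly like the test: both entries pass the "not-found"
    # rejection and both keys match the "quadlet-demo" prefix.
    - name: Ensure no resources (reproduction sketch)
      ansible.builtin.assert:
        that:
          - >-
            services | dict2items |
            rejectattr("value.status", "match", "not-found") |
            selectattr("key", "match", "quadlet-demo") |
            list | length == 0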
Jan 18 11:28:01 managed-node2 kernel: SELinux: policy capability network_peer_controls=1 Jan 18 11:28:01 managed-node2 kernel: SELinux: policy capability open_perms=1 Jan 18 11:28:01 managed-node2 kernel: SELinux: policy capability extended_socket_class=1 Jan 18 11:28:01 managed-node2 kernel: SELinux: policy capability always_check_network=0 Jan 18 11:28:01 managed-node2 kernel: SELinux: policy capability cgroup_seclabel=1 Jan 18 11:28:01 managed-node2 kernel: SELinux: policy capability nnp_nosuid_transition=1 Jan 18 11:28:01 managed-node2 kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Jan 18 11:28:01 managed-node2 kernel: SELinux: policy capability ioctl_skip_cloexec=0 Jan 18 11:28:01 managed-node2 kernel: SELinux: policy capability userspace_initial_context=0 Jan 18 11:28:03 managed-node2 kernel: SELinux: Converting 389 SID table entries... Jan 18 11:28:03 managed-node2 kernel: SELinux: policy capability network_peer_controls=1 Jan 18 11:28:03 managed-node2 kernel: SELinux: policy capability open_perms=1 Jan 18 11:28:03 managed-node2 kernel: SELinux: policy capability extended_socket_class=1 Jan 18 11:28:03 managed-node2 kernel: SELinux: policy capability always_check_network=0 Jan 18 11:28:03 managed-node2 kernel: SELinux: policy capability cgroup_seclabel=1 Jan 18 11:28:03 managed-node2 kernel: SELinux: policy capability nnp_nosuid_transition=1 Jan 18 11:28:03 managed-node2 kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Jan 18 11:28:03 managed-node2 kernel: SELinux: policy capability ioctl_skip_cloexec=0 Jan 18 11:28:03 managed-node2 kernel: SELinux: policy capability userspace_initial_context=0 Jan 18 11:28:05 managed-node2 setsebool[8715]: The virt_use_nfs policy boolean was changed to 1 by root Jan 18 11:28:05 managed-node2 setsebool[8715]: The virt_sandbox_use_all_caps policy boolean was changed to 1 by root Jan 18 11:28:08 managed-node2 kernel: SELinux: Converting 396 SID table entries... Jan 18 11:28:08 managed-node2 kernel: SELinux: policy capability network_peer_controls=1 Jan 18 11:28:08 managed-node2 kernel: SELinux: policy capability open_perms=1 Jan 18 11:28:08 managed-node2 kernel: SELinux: policy capability extended_socket_class=1 Jan 18 11:28:08 managed-node2 kernel: SELinux: policy capability always_check_network=0 Jan 18 11:28:08 managed-node2 kernel: SELinux: policy capability cgroup_seclabel=1 Jan 18 11:28:08 managed-node2 kernel: SELinux: policy capability nnp_nosuid_transition=1 Jan 18 11:28:08 managed-node2 kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Jan 18 11:28:08 managed-node2 kernel: SELinux: policy capability ioctl_skip_cloexec=0 Jan 18 11:28:08 managed-node2 kernel: SELinux: policy capability userspace_initial_context=0 Jan 18 11:28:09 managed-node2 groupadd[8735]: group added to /etc/group: name=polkitd, GID=114 Jan 18 11:28:09 managed-node2 groupadd[8735]: group added to /etc/gshadow: name=polkitd Jan 18 11:28:09 managed-node2 groupadd[8735]: new group: name=polkitd, GID=114 Jan 18 11:28:09 managed-node2 useradd[8738]: new user: name=polkitd, UID=114, GID=114, home=/, shell=/sbin/nologin, from=none Jan 18 11:28:09 managed-node2 dbus-broker-launch[632]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. 
░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reload request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reloaded again. Jan 18 11:28:09 managed-node2 dbus-broker-launch[632]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reload request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reloaded again. Jan 18 11:28:10 managed-node2 systemd[1]: Listening on pcscd.socket - PC/SC Smart Card Daemon Activation Socket. ░░ Subject: A start job for unit pcscd.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pcscd.socket has finished successfully. ░░ ░░ The job identifier is 1310. Jan 18 11:28:13 managed-node2 systemd[1]: Started run-p9137-i9437.service - [systemd-run] /usr/bin/systemctl start man-db-cache-update. ░░ Subject: A start job for unit run-p9137-i9437.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-p9137-i9437.service has finished successfully. ░░ ░░ The job identifier is 1388. Jan 18 11:28:13 managed-node2 systemd[1]: Reload requested from client PID 9141 ('systemctl') (unit session-5.scope)... Jan 18 11:28:13 managed-node2 systemd[1]: Reloading... Jan 18 11:28:13 managed-node2 systemd-rc-local-generator[9184]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 18 11:28:13 managed-node2 systemd-ssh-generator[9187]: Failed to query local AF_VSOCK CID: Permission denied Jan 18 11:28:13 managed-node2 (sd-exec-[9160]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 18 11:28:13 managed-node2 systemd[1]: Reloading finished in 202 ms. Jan 18 11:28:13 managed-node2 systemd[1]: Starting man-db-cache-update.service... ░░ Subject: A start job for unit man-db-cache-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has begun execution. ░░ ░░ The job identifier is 1466. Jan 18 11:28:13 managed-node2 systemd[1]: Queuing reload/restart jobs for marked units… Jan 18 11:28:13 managed-node2 systemd[1]: Reloading user@0.service - User Manager for UID 0... ░░ Subject: A reload job for unit user@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A reload job for unit user@0.service has begun execution.
░░ ░░ The job identifier is 1544. Jan 18 11:28:13 managed-node2 systemd[4347]: Received SIGRTMIN+25 from PID 1 (systemd). Jan 18 11:28:13 managed-node2 systemd[4347]: Reexecuting. Jan 18 11:28:13 managed-node2 systemd[1]: Reloaded user@0.service - User Manager for UID 0. ░░ Subject: A reload job for unit user@0.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A reload job for unit user@0.service has finished. ░░ ░░ The job identifier is 1544 and the job result is done. Jan 18 11:28:15 managed-node2 sudo[8627]: pam_unix(sudo:session): session closed for user root Jan 18 11:28:15 managed-node2 python3.12[9789]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:28:16 managed-node2 python3.12[9926]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jan 18 11:28:17 managed-node2 python3.12[10058]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:28:18 managed-node2 systemd[1]: man-db-cache-update.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service has successfully entered the 'dead' state. Jan 18 11:28:18 managed-node2 systemd[1]: Finished man-db-cache-update.service. ░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 1466. Jan 18 11:28:18 managed-node2 systemd[1]: run-p9137-i9437.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-p9137-i9437.service has successfully entered the 'dead' state. 
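Note on the failure being diagnosed: the quadlet unit files themselves were removed during cleanup (they no longer appear in list-unit-files), but systemd keeps a not-found/failed ghost of each unit until its failed state is reset, and that residue is what the assertion keeps seeing. A sketch of one way to clear it (not part of the test; unit names taken from the Debug output above):

- name: Reset failed state of the removed quadlet units
  ansible.builtin.command:
    cmd: systemctl reset-failed {{ item }}
  loop:
    - quadlet-demo.service
    - quadlet-demo-mysql.service
  changed_when: true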
Jan 18 11:28:18 managed-node2 python3.12[10195]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:28:19 managed-node2 python3.12[10326]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:28:19 managed-node2 python3.12[10457]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 18 11:28:21 managed-node2 python3.12[10589]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Jan 18 11:28:22 managed-node2 python3.12[10722]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 18 11:28:22 managed-node2 systemd[1]: Reload requested from client PID 10725 ('systemctl') (unit session-5.scope)... Jan 18 11:28:22 managed-node2 systemd[1]: Reloading... Jan 18 11:28:22 managed-node2 systemd-rc-local-generator[10768]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 18 11:28:22 managed-node2 systemd-ssh-generator[10771]: Failed to query local AF_VSOCK CID: Permission denied Jan 18 11:28:22 managed-node2 (sd-exec-[10743]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 18 11:28:22 managed-node2 systemd[1]: Reloading finished in 190 ms. Jan 18 11:28:22 managed-node2 systemd[1]: Starting firewalld.service - firewalld - dynamic firewall daemon... ░░ Subject: A start job for unit firewalld.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit firewalld.service has begun execution. ░░ ░░ The job identifier is 1545. Jan 18 11:28:23 managed-node2 systemd[1]: Started firewalld.service - firewalld - dynamic firewall daemon. ░░ Subject: A start job for unit firewalld.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit firewalld.service has finished successfully. ░░ ░░ The job identifier is 1545. Jan 18 11:28:23 managed-node2 kernel: Warning: Unmaintained driver is detected: ip_set Jan 18 11:28:24 managed-node2 python3.12[10938]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 18 11:28:24 managed-node2 systemd[1]: Starting polkit.service - Authorization Manager... 
░░ Subject: A start job for unit polkit.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit polkit.service has begun execution. ░░ ░░ The job identifier is 1628. Jan 18 11:28:24 managed-node2 polkitd[10955]: Started polkitd version 125 Jan 18 11:28:24 managed-node2 systemd[1]: Started polkit.service - Authorization Manager. ░░ Subject: A start job for unit polkit.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit polkit.service has finished successfully. ░░ ░░ The job identifier is 1628. Jan 18 11:28:25 managed-node2 python3.12[11095]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:28:25 managed-node2 python3.12[11226]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:28:26 managed-node2 python3.12[11357]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 18 11:28:27 managed-node2 python3.12[11489]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 18 11:28:28 managed-node2 dbus-broker-launch[632]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reload request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reloaded again. Jan 18 11:28:28 managed-node2 dbus-broker-launch[632]: Noticed file-system modification, trigger reload.
░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reload request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reloaded again. Jan 18 11:28:29 managed-node2 systemd[1]: Started run-p11505-i11805.service - [systemd-run] /usr/bin/systemctl start man-db-cache-update. ░░ Subject: A start job for unit run-p11505-i11805.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-p11505-i11805.service has finished successfully. ░░ ░░ The job identifier is 1709. Jan 18 11:28:29 managed-node2 systemd[1]: Starting man-db-cache-update.service... ░░ Subject: A start job for unit man-db-cache-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has begun execution. ░░ ░░ The job identifier is 1787. Jan 18 11:28:29 managed-node2 systemd[1]: Reload requested from client PID 11509 ('systemctl') (unit session-5.scope)... Jan 18 11:28:29 managed-node2 systemd[1]: Reloading... Jan 18 11:28:29 managed-node2 systemd-rc-local-generator[11548]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 18 11:28:29 managed-node2 systemd-ssh-generator[11557]: Failed to query local AF_VSOCK CID: Permission denied Jan 18 11:28:29 managed-node2 (sd-exec-[11530]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 18 11:28:29 managed-node2 systemd[1]: Reloading finished in 331 ms. Jan 18 11:28:29 managed-node2 systemd[1]: Queuing reload/restart jobs for marked units… Jan 18 11:28:29 managed-node2 systemd[1]: Started run-p11568-i11868.service - [systemd-run] /usr/bin/systemctl start man-db-cache-update. ░░ Subject: A start job for unit run-p11568-i11868.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-p11568-i11868.service has finished successfully. ░░ ░░ The job identifier is 1865. Jan 18 11:28:29 managed-node2 systemd[1]: man-db-cache-update.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service has successfully entered the 'dead' state. Jan 18 11:28:29 managed-node2 systemd[1]: Finished man-db-cache-update.service. ░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 1787. Jan 18 11:28:30 managed-node2 systemd[1]: run-p11505-i11805.service: Deactivated successfully.
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-p11505-i11805.service has successfully entered the 'dead' state. Jan 18 11:28:30 managed-node2 systemd[1]: run-p11568-i11868.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-p11568-i11868.service has successfully entered the 'dead' state. Jan 18 11:28:30 managed-node2 python3.12[11705]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Jan 18 11:28:32 managed-node2 python3.12[11865]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jan 18 11:28:33 managed-node2 kernel: SELinux: Converting 426 SID table entries... Jan 18 11:28:33 managed-node2 kernel: SELinux: policy capability network_peer_controls=1 Jan 18 11:28:33 managed-node2 kernel: SELinux: policy capability open_perms=1 Jan 18 11:28:33 managed-node2 kernel: SELinux: policy capability extended_socket_class=1 Jan 18 11:28:33 managed-node2 kernel: SELinux: policy capability always_check_network=0 Jan 18 11:28:33 managed-node2 kernel: SELinux: policy capability cgroup_seclabel=1 Jan 18 11:28:33 managed-node2 kernel: SELinux: policy capability nnp_nosuid_transition=1 Jan 18 11:28:33 managed-node2 kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Jan 18 11:28:33 managed-node2 kernel: SELinux: policy capability ioctl_skip_cloexec=0 Jan 18 11:28:33 managed-node2 kernel: SELinux: policy capability userspace_initial_context=0 Jan 18 11:28:34 managed-node2 python3.12[12000]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jan 18 11:28:38 managed-node2 python3.12[12131]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:28:40 managed-node2 python3.12[12264]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:28:40 managed-node2 python3.12[12395]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:28:41 managed-node2 python3.12[12526]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 18 11:28:41 managed-node2 python3.12[12631]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/nopull.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1737217720.7656662-8928-264050332943533/.source.yml _original_basename=._etjxfpn follow=False checksum=d5dc917e3cae36de03aa971a17ac473f86fdf934 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:28:42 managed-node2 python3.12[12762]: 
ansible-containers.podman.podman_play Invoked with state=created kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jan 18 11:28:42 managed-node2 systemd[1]: var-lib-containers-storage-overlay-compat3670888534-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-compat3670888534-merged.mount has successfully entered the 'dead' state. Jan 18 11:28:42 managed-node2 rsyslogd[891]: imjournal: journal files changed, reloading... [v8.2412.0-1.el10 try https://www.rsyslog.com/e/0 ] Jan 18 11:28:42 managed-node2 kernel: evm: overlay not supported Jan 18 11:28:42 managed-node2 systemd[1]: var-lib-containers-storage-overlay-metacopy\x2dcheck2987161487-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-metacopy\x2dcheck2987161487-merged.mount has successfully entered the 'dead' state. Jan 18 11:28:42 managed-node2 podman[12769]: 2025-01-18 11:28:42.136419903 -0500 EST m=+0.075181485 system refresh Jan 18 11:28:42 managed-node2 podman[12769]: 2025-01-18 11:28:42.447917868 -0500 EST m=+0.386679583 image build 00e7ed281fae0a457d309c3c4887646b98c1226f1ff60de69ef58bf1e343c789 Jan 18 11:28:42 managed-node2 systemd[1]: Created slice machine.slice - Slice /machine. ░░ Subject: A start job for unit machine.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine.slice has finished successfully. ░░ ░░ The job identifier is 2022. Jan 18 11:28:42 managed-node2 systemd[1]: Created slice machine-libpod_pod_1605f25a3e2717cc73705c948b959f044565c241ccf1c583fa5d9fa481c5bc93.slice - cgroup machine-libpod_pod_1605f25a3e2717cc73705c948b959f044565c241ccf1c583fa5d9fa481c5bc93.slice. ░░ Subject: A start job for unit machine-libpod_pod_1605f25a3e2717cc73705c948b959f044565c241ccf1c583fa5d9fa481c5bc93.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_1605f25a3e2717cc73705c948b959f044565c241ccf1c583fa5d9fa481c5bc93.slice has finished successfully. ░░ ░░ The job identifier is 2021. 
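The ansible-containers.podman.podman_play entry above is the role creating a pod from a kube YAML file without starting it. A minimal sketch of a roughly equivalent task, using only the parameters visible in the log (an illustration, not the role's exact task file):

    - name: Create (but do not start) a pod from a kube YAML file
      containers.podman.podman_play:
        kube_file: /etc/containers/ansible-kubernetes.d/nopull.yml
        state: created
        executable: podman

With state: created, podman builds the infra container and the pod (the pod create and container create events that follow) but does not start the workload.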
Jan 18 11:28:42 managed-node2 podman[12769]: 2025-01-18 11:28:42.49381031 -0500 EST m=+0.432571901 container create 029090fa53303e97b2031648820e023bdfc8ca076759e8222ec1b22bd56f5c70 (image=localhost/podman-pause:5.3.1-1733097600, name=1605f25a3e27-infra, pod_id=1605f25a3e2717cc73705c948b959f044565c241ccf1c583fa5d9fa481c5bc93, io.buildah.version=1.38.0) Jan 18 11:28:42 managed-node2 podman[12769]: 2025-01-18 11:28:42.498742681 -0500 EST m=+0.437504350 pod create 1605f25a3e2717cc73705c948b959f044565c241ccf1c583fa5d9fa481c5bc93 (image=, name=nopull) Jan 18 11:28:43 managed-node2 podman[12769]: 2025-01-18 11:28:43.880806436 -0500 EST m=+1.819568103 container create 798be2d41252cc967126d0ad1739a311fe2c5ce622e7e7ef4422919ff8eb5293 (image=quay.io/libpod/testimage:20210610, name=nopull-nopull, pod_id=1605f25a3e2717cc73705c948b959f044565c241ccf1c583fa5d9fa481c5bc93, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, app=test, io.containers.autoupdate=registry) Jan 18 11:28:43 managed-node2 podman[12769]: 2025-01-18 11:28:43.860351116 -0500 EST m=+1.799113107 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jan 18 11:28:43 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:28:46 managed-node2 python3.12[13102]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:28:47 managed-node2 python3.12[13239]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:28:50 managed-node2 python3.12[13372]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 18 11:28:51 managed-node2 python3.12[13504]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Jan 18 11:28:51 managed-node2 python3.12[13637]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 18 11:28:52 managed-node2 python3.12[13770]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None 
ipset=None ipset_type=None description=None short=None Jan 18 11:28:54 managed-node2 python3.12[13901]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 18 11:28:55 managed-node2 python3.12[14033]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 18 11:28:56 managed-node2 python3.12[14165]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Jan 18 11:28:57 managed-node2 python3.12[14325]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jan 18 11:28:58 managed-node2 python3.12[14456]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jan 18 11:29:02 managed-node2 python3.12[14587]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:29:04 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:29:04 managed-node2 podman[14728]: 2025-01-18 11:29:04.938735437 -0500 EST m=+0.516042146 image pull-error quay.io/linux-system-roles/this_is_a_bogus_image:latest initializing source docker://quay.io/linux-system-roles/this_is_a_bogus_image:latest: reading manifest latest in quay.io/linux-system-roles/this_is_a_bogus_image: unauthorized: access to the requested resource is not authorized Jan 18 11:29:04 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
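The firewall_lib and local_seport invocations above are internal modules of the firewall and selinux roles. Through the roles' public interfaces, the same port handling would look roughly like the sketch below; the firewall and selinux_ports variable names are taken from those roles' documentation, so verify them against the versions in use:

    - name: Open and label the test ports
      hosts: managed-node2
      roles:
        - role: fedora.linux_system_roles.firewall
          vars:
            firewall:
              - port: 15001-15003/tcp
                state: enabled
        - role: fedora.linux_system_roles.selinux
          vars:
            selinux_ports:
              - ports: 15001-15003
                proto: tcp
                setype: http_port_t
                state: present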
Jan 18 11:29:05 managed-node2 python3.12[14866]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:29:05 managed-node2 python3.12[14997]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:29:06 managed-node2 python3.12[15128]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 18 11:29:06 managed-node2 python3.12[15233]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/bogus.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1737217746.0075302-9944-244956550119175/.source.yml _original_basename=.5ma9xxzi follow=False checksum=f8266a972ed3be7e204d2a67883fe3a22b8dbf18 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:29:07 managed-node2 python3.12[15364]: ansible-containers.podman.podman_play Invoked with state=created kube_file=/etc/containers/ansible-kubernetes.d/bogus.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jan 18 11:29:07 managed-node2 systemd[1]: Created slice machine-libpod_pod_ba6bb4341304eea77e588db44a4971abda509361fcb33d0fa426535dff558713.slice - cgroup machine-libpod_pod_ba6bb4341304eea77e588db44a4971abda509361fcb33d0fa426535dff558713.slice. ░░ Subject: A start job for unit machine-libpod_pod_ba6bb4341304eea77e588db44a4971abda509361fcb33d0fa426535dff558713.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_ba6bb4341304eea77e588db44a4971abda509361fcb33d0fa426535dff558713.slice has finished successfully. ░░ ░░ The job identifier is 2027. 
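The bogus.yml play is a deliberate negative test: quay.io/linux-system-roles/this_is_a_bogus_image cannot be pulled, as the image pull-error entries show. A test around such a case would typically register the module result instead of letting the play abort; a hedged sketch (the task names and registered variable are illustrative, not taken from the test):

    - name: Attempt to create a pod whose image cannot be pulled
      containers.podman.podman_play:
        kube_file: /etc/containers/ansible-kubernetes.d/bogus.yml
        state: created
      register: __bogus_result
      ignore_errors: true

    - name: Show whatever error the module reported
      ansible.builtin.debug:
        msg: "{{ __bogus_result.msg | default('no error message') }}"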
Jan 18 11:29:07 managed-node2 podman[15371]: 2025-01-18 11:29:07.267388693 -0500 EST m=+0.120430397 container create f8370433c3a4f8ba8a947f6b1890275bb2962a962b65c68f5555ee94a4731dec (image=localhost/podman-pause:5.3.1-1733097600, name=ba6bb4341304-infra, pod_id=ba6bb4341304eea77e588db44a4971abda509361fcb33d0fa426535dff558713, io.buildah.version=1.38.0) Jan 18 11:29:07 managed-node2 podman[15371]: 2025-01-18 11:29:07.271907663 -0500 EST m=+0.124949338 pod create ba6bb4341304eea77e588db44a4971abda509361fcb33d0fa426535dff558713 (image=, name=bogus) Jan 18 11:29:07 managed-node2 podman[15371]: 2025-01-18 11:29:07.598058737 -0500 EST m=+0.451100473 image pull-error quay.io/linux-system-roles/this_is_a_bogus_image:latest initializing source docker://quay.io/linux-system-roles/this_is_a_bogus_image:latest: reading manifest latest in quay.io/linux-system-roles/this_is_a_bogus_image: unauthorized: access to the requested resource is not authorized Jan 18 11:29:07 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:29:10 managed-node2 python3.12[15640]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:29:11 managed-node2 python3.12[15777]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:29:13 managed-node2 python3.12[15910]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 18 11:29:15 managed-node2 python3.12[16042]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Jan 18 11:29:15 managed-node2 python3.12[16175]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 18 11:29:16 managed-node2 python3.12[16308]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 18 11:29:18 managed-node2 python3.12[16439]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False 
bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 18 11:29:19 managed-node2 python3.12[16571]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 18 11:29:20 managed-node2 python3.12[16703]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Jan 18 11:29:22 managed-node2 python3.12[16863]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jan 18 11:29:22 managed-node2 python3.12[16994]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jan 18 11:29:27 managed-node2 python3.12[17125]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:29:28 managed-node2 python3.12[17258]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/nopull.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:29:29 managed-node2 python3.12[17390]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-nopull.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None Jan 18 11:29:29 managed-node2 python3.12[17523]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:29:30 managed-node2 python3.12[17656]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jan 18 11:29:30 managed-node2 python3.12[17656]: ansible-containers.podman.podman_play version: 5.3.1, kube file /etc/containers/ansible-kubernetes.d/nopull.yml Jan 18 11:29:30 managed-node2 podman[17664]: 2025-01-18 11:29:30.96604145 -0500 EST m=+0.025928446 pod stop 
1605f25a3e2717cc73705c948b959f044565c241ccf1c583fa5d9fa481c5bc93 (image=, name=nopull) Jan 18 11:29:30 managed-node2 systemd[1]: Removed slice machine-libpod_pod_1605f25a3e2717cc73705c948b959f044565c241ccf1c583fa5d9fa481c5bc93.slice - cgroup machine-libpod_pod_1605f25a3e2717cc73705c948b959f044565c241ccf1c583fa5d9fa481c5bc93.slice. ░░ Subject: A stop job for unit machine-libpod_pod_1605f25a3e2717cc73705c948b959f044565c241ccf1c583fa5d9fa481c5bc93.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_1605f25a3e2717cc73705c948b959f044565c241ccf1c583fa5d9fa481c5bc93.slice has finished. ░░ ░░ The job identifier is 2033 and the job result is done. Jan 18 11:29:31 managed-node2 podman[17664]: 2025-01-18 11:29:31.00271487 -0500 EST m=+0.062601829 container remove 798be2d41252cc967126d0ad1739a311fe2c5ce622e7e7ef4422919ff8eb5293 (image=quay.io/libpod/testimage:20210610, name=nopull-nopull, pod_id=1605f25a3e2717cc73705c948b959f044565c241ccf1c583fa5d9fa481c5bc93, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jan 18 11:29:31 managed-node2 podman[17664]: 2025-01-18 11:29:31.024443838 -0500 EST m=+0.084330733 container remove 029090fa53303e97b2031648820e023bdfc8ca076759e8222ec1b22bd56f5c70 (image=localhost/podman-pause:5.3.1-1733097600, name=1605f25a3e27-infra, pod_id=1605f25a3e2717cc73705c948b959f044565c241ccf1c583fa5d9fa481c5bc93, io.buildah.version=1.38.0) Jan 18 11:29:31 managed-node2 podman[17664]: 2025-01-18 11:29:31.032434801 -0500 EST m=+0.092321692 pod remove 1605f25a3e2717cc73705c948b959f044565c241ccf1c583fa5d9fa481c5bc93 (image=, name=nopull) Jan 18 11:29:31 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:29:31 managed-node2 python3.12[17803]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:29:31 managed-node2 python3.12[17934]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:29:31 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
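The teardown pass above stops the templated podman-kube@ unit, removes the play, deletes the kube file, and prunes images. As tasks, that sequence looks roughly like the following sketch; the escaped unit name is exactly what the earlier systemd-escape --template call printed:

    - name: Stop and disable the podman-kube unit for the play
      ansible.builtin.systemd:
        name: 'podman-kube@-etc-containers-ansible\x2dkubernetes.d-nopull.yml.service'
        state: stopped
        enabled: false

    - name: Remove the pod and containers created from the kube file
      containers.podman.podman_play:
        kube_file: /etc/containers/ansible-kubernetes.d/nopull.yml
        state: absent

    - name: Remove the kube file itself
      ansible.builtin.file:
        path: /etc/containers/ansible-kubernetes.d/nopull.yml
        state: absent

    - name: Prune now-unused images
      ansible.builtin.command: podman image prune -f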
Jan 18 11:29:34 managed-node2 python3.12[18202]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:29:36 managed-node2 python3.12[18339]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:29:38 managed-node2 python3.12[18472]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 18 11:29:39 managed-node2 python3.12[18604]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Jan 18 11:29:40 managed-node2 python3.12[18737]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 18 11:29:41 managed-node2 python3.12[18870]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 18 11:29:43 managed-node2 python3.12[19001]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 18 11:29:44 managed-node2 python3.12[19133]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 18 11:29:45 managed-node2 python3.12[19265]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 
fact_path=/etc/ansible/facts.d Jan 18 11:29:46 managed-node2 python3.12[19425]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jan 18 11:29:47 managed-node2 python3.12[19556]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jan 18 11:29:52 managed-node2 python3.12[19687]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:29:53 managed-node2 python3.12[19820]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/bogus.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:29:54 managed-node2 python3.12[19952]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-bogus.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None Jan 18 11:29:54 managed-node2 python3.12[20085]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:29:55 managed-node2 python3.12[20218]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/bogus.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jan 18 11:29:55 managed-node2 python3.12[20218]: ansible-containers.podman.podman_play version: 5.3.1, kube file /etc/containers/ansible-kubernetes.d/bogus.yml Jan 18 11:29:55 managed-node2 podman[20225]: 2025-01-18 11:29:55.501778371 -0500 EST m=+0.028176873 pod stop ba6bb4341304eea77e588db44a4971abda509361fcb33d0fa426535dff558713 (image=, name=bogus) Jan 18 11:29:55 managed-node2 systemd[1]: Removed slice machine-libpod_pod_ba6bb4341304eea77e588db44a4971abda509361fcb33d0fa426535dff558713.slice - cgroup machine-libpod_pod_ba6bb4341304eea77e588db44a4971abda509361fcb33d0fa426535dff558713.slice. ░░ Subject: A stop job for unit machine-libpod_pod_ba6bb4341304eea77e588db44a4971abda509361fcb33d0fa426535dff558713.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_ba6bb4341304eea77e588db44a4971abda509361fcb33d0fa426535dff558713.slice has finished. ░░ ░░ The job identifier is 2035 and the job result is done. 
Jan 18 11:29:55 managed-node2 podman[20225]: 2025-01-18 11:29:55.534028376 -0500 EST m=+0.060426894 container remove f8370433c3a4f8ba8a947f6b1890275bb2962a962b65c68f5555ee94a4731dec (image=localhost/podman-pause:5.3.1-1733097600, name=ba6bb4341304-infra, pod_id=ba6bb4341304eea77e588db44a4971abda509361fcb33d0fa426535dff558713, io.buildah.version=1.38.0) Jan 18 11:29:55 managed-node2 podman[20225]: 2025-01-18 11:29:55.542061599 -0500 EST m=+0.068460080 pod remove ba6bb4341304eea77e588db44a4971abda509361fcb33d0fa426535dff558713 (image=, name=bogus) Jan 18 11:29:55 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:29:55 managed-node2 python3.12[20365]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:29:56 managed-node2 python3.12[20496]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:29:56 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
Jan 18 11:29:59 managed-node2 python3.12[20765]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:30:00 managed-node2 python3.12[20903]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:30:03 managed-node2 python3.12[21036]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 18 11:30:04 managed-node2 python3.12[21168]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Jan 18 11:30:05 managed-node2 python3.12[21301]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 18 11:30:05 managed-node2 python3.12[21434]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 18 11:30:07 managed-node2 python3.12[21565]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 18 11:30:08 managed-node2 python3.12[21697]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 18 11:30:09 managed-node2 python3.12[21829]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 
fact_path=/etc/ansible/facts.d Jan 18 11:30:11 managed-node2 python3.12[21989]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jan 18 11:30:11 managed-node2 python3.12[22120]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jan 18 11:30:16 managed-node2 python3.12[22251]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None Jan 18 11:30:16 managed-node2 python3.12[22383]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:30:17 managed-node2 python3.12[22516]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:30:17 managed-node2 python3.12[22648]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:30:18 managed-node2 python3.12[22780]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:30:19 managed-node2 python3.12[22912]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl enable-linger podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Jan 18 11:30:19 managed-node2 systemd[1]: Created slice user-3001.slice - User Slice of UID 3001. ░░ Subject: A start job for unit user-3001.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-3001.slice has finished successfully. ░░ ░░ The job identifier is 2038. Jan 18 11:30:19 managed-node2 systemd[1]: Starting user-runtime-dir@3001.service - User Runtime Directory /run/user/3001... ░░ Subject: A start job for unit user-runtime-dir@3001.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@3001.service has begun execution. ░░ ░░ The job identifier is 2037. Jan 18 11:30:19 managed-node2 systemd[1]: Finished user-runtime-dir@3001.service - User Runtime Directory /run/user/3001. ░░ Subject: A start job for unit user-runtime-dir@3001.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@3001.service has finished successfully. ░░ ░░ The job identifier is 2037. Jan 18 11:30:19 managed-node2 systemd[1]: Starting user@3001.service - User Manager for UID 3001... ░░ Subject: A start job for unit user@3001.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@3001.service has begun execution. ░░ ░░ The job identifier is 2117. 
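Two prerequisites for rootless podman are visible above: getsubids confirms the user has subordinate UID/GID ranges, and loginctl enable-linger starts a persistent per-user systemd instance (the user@3001.service start that follows). The creates= guard in the logged command keeps the linger step idempotent; as a task it would look roughly like:

    - name: Enable lingering for the rootless user
      ansible.builtin.command:
        cmd: loginctl enable-linger podman_basic_user
        creates: /var/lib/systemd/linger/podman_basic_user

Once linger is enabled, the user manager at /run/user/3001 owns the user's podman scopes and units even when no login session is open.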
Jan 18 11:30:19 managed-node2 systemd-logind[658]: New session 6 of user podman_basic_user. ░░ Subject: A new session 6 has been created for user podman_basic_user ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 6 has been created for the user podman_basic_user. ░░ ░░ The leading process of the session is 22916. Jan 18 11:30:19 managed-node2 (systemd)[22916]: pam_unix(systemd-user:session): session opened for user podman_basic_user(uid=3001) by podman_basic_user(uid=0) Jan 18 11:30:19 managed-node2 systemd[22916]: Queued start job for default target default.target. Jan 18 11:30:19 managed-node2 systemd[22916]: Created slice app.slice - User Application Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 5. Jan 18 11:30:19 managed-node2 systemd[22916]: Started grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 10. Jan 18 11:30:19 managed-node2 systemd[22916]: Started systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 9. Jan 18 11:30:19 managed-node2 systemd[22916]: Reached target paths.target - Paths. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 11. Jan 18 11:30:19 managed-node2 systemd[22916]: Reached target timers.target - Timers. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 8. Jan 18 11:30:19 managed-node2 systemd[22916]: Starting dbus.socket - D-Bus User Message Bus Socket... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 4. Jan 18 11:30:19 managed-node2 systemd[22916]: Starting systemd-tmpfiles-setup.service - Create User Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 12. Jan 18 11:30:19 managed-node2 systemd[22916]: Finished systemd-tmpfiles-setup.service - Create User Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 12. Jan 18 11:30:19 managed-node2 systemd[22916]: Listening on dbus.socket - D-Bus User Message Bus Socket. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 4. Jan 18 11:30:19 managed-node2 systemd[22916]: Reached target sockets.target - Sockets. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 3. Jan 18 11:30:19 managed-node2 systemd[22916]: Reached target basic.target - Basic System. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 2. Jan 18 11:30:19 managed-node2 systemd[22916]: Reached target default.target - Main User Target. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 1. Jan 18 11:30:19 managed-node2 systemd[22916]: Startup finished in 62ms. ░░ Subject: User manager start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The user manager instance for user 3001 has been started. All services queued ░░ for starting have been started. Note that other services might still be starting ░░ up or be started at any later time. ░░ ░░ Startup of the manager took 62627 microseconds. Jan 18 11:30:19 managed-node2 systemd[1]: Started user@3001.service - User Manager for UID 3001. ░░ Subject: A start job for unit user@3001.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@3001.service has finished successfully. ░░ ░░ The job identifier is 2117. Jan 18 11:30:19 managed-node2 python3.12[23062]: ansible-file Invoked with path=/tmp/lsr_8do_wnm9_podman/httpd1 state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:30:20 managed-node2 python3.12[23193]: ansible-file Invoked with path=/tmp/lsr_8do_wnm9_podman/httpd1-create state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:30:20 managed-node2 sudo[23366]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-eibsoicfymjqrqrmmedgncnjbjufhoom ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217820.5461752-12864-100152971586486/AnsiballZ_podman_image.py' Jan 18 11:30:20 managed-node2 sudo[23366]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jan 18 11:30:21 managed-node2 systemd[22916]: Created slice session.slice - User Core Session Slice. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 14. Jan 18 11:30:21 managed-node2 systemd[22916]: Starting dbus-broker.service - D-Bus User Message Bus... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 13. Jan 18 11:30:21 managed-node2 dbus-broker-launch[23391]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored Jan 18 11:30:21 managed-node2 dbus-broker-launch[23391]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored Jan 18 11:30:21 managed-node2 systemd[22916]: Started dbus-broker.service - D-Bus User Message Bus. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 13. Jan 18 11:30:21 managed-node2 dbus-broker-launch[23391]: Ready Jan 18 11:30:21 managed-node2 systemd[22916]: Created slice user.slice - Slice /user. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 20. Jan 18 11:30:21 managed-node2 systemd[22916]: Started podman-23376.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 19. Jan 18 11:30:21 managed-node2 systemd[22916]: Started podman-pause-4e2d8a8c.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 23. Jan 18 11:30:21 managed-node2 systemd[22916]: Started podman-23394.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 27. Jan 18 11:30:22 managed-node2 systemd[22916]: Started podman-23419.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 31. 
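The sudo entry running AnsiballZ_podman_image.py is the role pre-pulling an image as podman_basic_user; the podman-*.scope units above are the resulting rootless podman processes under the user manager. A sketch of such a task follows; the image name is an assumption for illustration, since this journal excerpt does not show it:

    - name: Pre-pull an image as the rootless user
      containers.podman.podman_image:
        name: quay.io/libpod/testimage:20210610  # assumed image, not shown in this excerpt
        state: present
      become: true
      become_user: podman_basic_user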
Jan 18 11:30:22 managed-node2 sudo[23366]: pam_unix(sudo:session): session closed for user podman_basic_user Jan 18 11:30:22 managed-node2 python3.12[23557]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:30:22 managed-node2 python3.12[23688]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d state=directory owner=podman_basic_user group=3001 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:30:23 managed-node2 python3.12[23819]: ansible-ansible.legacy.stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 18 11:30:23 managed-node2 python3.12[23924]: ansible-ansible.legacy.copy Invoked with dest=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml owner=podman_basic_user group=3001 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1737217823.061495-12952-136346022576279/.source.yml _original_basename=.s86ot0b8 follow=False checksum=ebcaacb95b0da6d147ab23a97880f92ccc2779b1 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:30:24 managed-node2 sudo[24097]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-iasyakceuldjmcluzgmadhodgooypgkm ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217823.8349788-12985-263720345862192/AnsiballZ_podman_play.py' Jan 18 11:30:24 managed-node2 sudo[24097]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jan 18 11:30:24 managed-node2 python3.12[24100]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jan 18 11:30:24 managed-node2 systemd[22916]: Started podman-24108.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 35. Jan 18 11:30:24 managed-node2 systemd[22916]: Created slice user-libpod_pod_ecc89624fd150903de3d8406f631d4530d0728560e903744f4aceb8883d57323.slice - cgroup user-libpod_pod_ecc89624fd150903de3d8406f631d4530d0728560e903744f4aceb8883d57323.slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 39. 
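Note how the logged sudo command line exports XDG_RUNTIME_DIR=/run/user/3001 before invoking the module: rootless podman needs that variable to locate the user's runtime directory and session bus. In playbook form the pattern is roughly:

    - name: Start the kube play as the rootless user
      containers.podman.podman_play:
        kube_file: /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml
        state: started
      become: true
      become_user: podman_basic_user
      environment:
        XDG_RUNTIME_DIR: /run/user/3001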
Jan 18 11:30:24 managed-node2 kernel: tun: Universal TUN/TAP device driver, 1.6 Jan 18 11:30:24 managed-node2 systemd[22916]: Started rootless-netns-cdefcea5.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 43. Jan 18 11:30:24 managed-node2 kernel: bridge: filtering via arp/ip/ip6tables is no longer available by default. Update your scripts to load br_netfilter if you need this. Jan 18 11:30:24 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 18 11:30:24 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 18 11:30:24 managed-node2 kernel: veth0: entered allmulticast mode Jan 18 11:30:24 managed-node2 kernel: veth0: entered promiscuous mode Jan 18 11:30:24 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 18 11:30:24 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Jan 18 11:30:24 managed-node2 systemd[22916]: Started run-p24190-i24490.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/user/3001/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 47. Jan 18 11:30:24 managed-node2 aardvark-dns[24190]: starting aardvark on a child with pid 24191 Jan 18 11:30:24 managed-node2 aardvark-dns[24191]: Successfully parsed config Jan 18 11:30:24 managed-node2 aardvark-dns[24191]: Listen v4 ip {"podman-default-kube-network": [10.89.0.1]} Jan 18 11:30:24 managed-node2 aardvark-dns[24191]: Listen v6 ip {} Jan 18 11:30:24 managed-node2 aardvark-dns[24191]: Using the following upstream servers: [169.254.1.1:53, 10.29.169.13:53, 10.29.170.12:53] Jan 18 11:30:24 managed-node2 conmon[24206]: conmon 4e247833c025794e6d04 : failed to write to /proc/self/oom_score_adj: Permission denied Jan 18 11:30:24 managed-node2 systemd[22916]: Started libpod-conmon-4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 51. Jan 18 11:30:24 managed-node2 conmon[24208]: conmon 4e247833c025794e6d04 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/14/attach} Jan 18 11:30:24 managed-node2 conmon[24208]: conmon 4e247833c025794e6d04 : terminal_ctrl_fd: 14 Jan 18 11:30:24 managed-node2 conmon[24208]: conmon 4e247833c025794e6d04 : winsz read side: 17, winsz write side: 18 Jan 18 11:30:24 managed-node2 systemd[22916]: Started libpod-4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 56. 
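At this point the rootless network stack for the play is up: a rootless-netns scope, the podman1 bridge with its veth pair, and aardvark-dns answering on 10.89.0.1 for podman-default-kube-network. To verify it from a play, one illustrative read-only check (names taken from the log):

    - name: Inspect the rootless kube network
      ansible.builtin.command: podman network inspect podman-default-kube-network
      become: true
      become_user: podman_basic_user
      environment:
        XDG_RUNTIME_DIR: /run/user/3001
      register: __net_info
      changed_when: false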
Jan 18 11:30:24 managed-node2 conmon[24208]: conmon 4e247833c025794e6d04 : container PID: 24210 Jan 18 11:30:24 managed-node2 conmon[24208]: conmon 4e247833c025794e6d04 : container 24210 exited with status 127 Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e)" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=info msg="Using sqlite as database backend" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="systemd-logind: Unknown object '/'." Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Using graph driver overlay" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Using run root /run/user/3001/containers" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Using transient store: false" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Cached value indicated that overlay is supported" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Cached value indicated that overlay is supported" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Cached value indicated that metacopy is not being used" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Cached value indicated that native-diff is usable" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" Jan 18 11:30:24 managed-node2 
/usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Initializing event backend file" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=info msg="Setting parallel job count to 7" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Cleaning up container 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Tearing down network namespace at /run/user/3001/netns/netns-3db7a625-f37c-48f9-4131-62aa9e219beb for container 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network ffb9a78e5d96102e63d60f7c7ce353d5163cf289f06efb872ce4d70b43a3efc3 bridge podman1 2025-01-18 11:30:24.308994366 -0500 EST [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="Successfully loaded 2 networks" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=debug msg="The path of /etc/resolv.conf in 
the mount ns is \"/etc/resolv.conf\"" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=info msg="netavark: [DEBUG netavark::commands::teardown] Tearing down..\n" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=info msg="netavark: [INFO netavark::firewall] Using nftables firewall driver\n" Jan 18 11:30:24 managed-node2 aardvark-dns[24191]: Received SIGHUP Jan 18 11:30:24 managed-node2 aardvark-dns[24191]: Successfully parsed config Jan 18 11:30:24 managed-node2 aardvark-dns[24191]: Listen v4 ip {} Jan 18 11:30:24 managed-node2 aardvark-dns[24191]: Listen v6 ip {} Jan 18 11:30:24 managed-node2 aardvark-dns[24191]: No configuration found stopping the sever Jan 18 11:30:24 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 18 11:30:24 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Jan 18 11:30:24 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Jan 18 11:30:24 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=info msg="netavark: [INFO netavark::network::bridge] removing bridge podman1\n" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=info msg="netavark: [DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \"netavark\", chain: \"INPUT\", expr: [Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \"ip\", field: \"saddr\" }))), right: Named(Prefix(Prefix { addr: String(\"10.89.0.0\"), len: 24 })), op: EQ }), Match(Match { left: Named(Meta(Meta { key: L4proto })), right: Named(Set([Element(String(\"tcp\")), Element(String(\"udp\"))])), op: EQ }), Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \"th\", field: \"dport\" }))), right: Number(53), op: EQ }), Accept(None)], handle: Some(23), index: None, comment: None }\n[DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \"netavark\", chain: \"FORWARD\", expr: [Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \"ip\", field: \"daddr\" }))), right: Named(Prefix(Prefix { addr: String(\"10.89.0.0\"), len: 24 })), op: EQ }), Match(Match { left: Named(CT(CT { key: \"state\", family: None, dir: None })), right: List([String(\"established\"), String(\"related\")]), op: IN }), Accept(None)], handle: Some(24), index: None, comment: None }\n[DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \"netavark\", chain: \"FORWARD\", expr: [Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \"ip\", field: \"saddr\" }))), right: Named(Prefix(Prefix { addr: String(\"10.89.0.0\"), len: 24 })), op: EQ }), Accept(None)], handle: Some(25), index: None, comment: None }\n" Jan 18 11:30:24 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=info msg="netavark: [DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \"netavark\", chain: \"POSTROUTING\", expr: [Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \"ip\", field: \"saddr\" }))), right: Named(Prefix(Prefix { addr: String(\"10.89.0.0\"), len: 24 })), op: EQ }), Jump(JumpTarget { target: \"nv_ffb9a78e_10_89_0_0_nm24\" })], handle: Some(26), index: None, comment: None }\n[DEBUG netavark::firewall::nft] Removing 4 rules\n[DEBUG netavark::firewall::nft] Found chain nv_ffb9a78e_10_89_0_0_nm24\n" Jan 18 11:30:24 managed-node2 
/usr/bin/podman[24212]: time="2025-01-18T11:30:24-05:00" level=info msg="netavark: [DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \"netavark\", chain: \"NETAVARK-ISOLATION-3\", expr: [Match(Match { left: Named(Meta(Meta { key: Oifname })), right: String(\"podman1\"), op: EQ }), Drop(None)], handle: Some(17), index: None, comment: None }\n[DEBUG netavark::firewall::nft] Removing 1 isolation rules for network\n" Jan 18 11:30:25 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:25-05:00" level=info msg="netavark: [DEBUG netavark::firewall::nft] Found chain nv_ffb9a78e_10_89_0_0_nm24_dnat\n[DEBUG netavark::firewall::nft] Found chain nv_ffb9a78e_10_89_0_0_nm24_dnat\n" Jan 18 11:30:25 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:25-05:00" level=info msg="netavark: [DEBUG netavark::commands::teardown] Teardown complete\n" Jan 18 11:30:25 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:25-05:00" level=debug msg="Cleaning up rootless network namespace" Jan 18 11:30:25 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:25-05:00" level=debug msg="Successfully cleaned up container 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e" Jan 18 11:30:25 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:25-05:00" level=debug msg="Unmounted container \"4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e\"" Jan 18 11:30:25 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:25-05:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e)" Jan 18 11:30:25 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:25-05:00" level=debug msg="Shutting down engines" Jan 18 11:30:25 managed-node2 /usr/bin/podman[24212]: time="2025-01-18T11:30:25-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=24212 Jan 18 11:30:25 managed-node2 systemd[22916]: Stopping libpod-conmon-4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e.scope... ░░ Subject: A stop job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has begun execution. ░░ ░░ The job identifier is 62. Jan 18 11:30:25 managed-node2 systemd[22916]: Stopped libpod-conmon-4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e.scope. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 62 and the job result is done. Jan 18 11:30:25 managed-node2 systemd[22916]: Removed slice user-libpod_pod_ecc89624fd150903de3d8406f631d4530d0728560e903744f4aceb8883d57323.slice - cgroup user-libpod_pod_ecc89624fd150903de3d8406f631d4530d0728560e903744f4aceb8883d57323.slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 61 and the job result is done. 
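The PODMAN-PLAY-KUBE lines that follow are emitted by the containers.podman.podman_play module, which wraps the /bin/podman play kube command shown in the log. A minimal task that would produce that invocation as podman_basic_user might look like the sketch below; the real test task and its variables are not visible in this log, so the task name and become settings are assumptions, while the kube file path is taken from the logged command line.

    # Minimal sketch, assuming the containers.podman collection is installed;
    # the path is from the log, everything else is an assumption.
    - name: Play the httpd1 kube file as the rootless user
      containers.podman.podman_play:
        kube_file: /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml
        state: started
        log_level: debug
      become: true
      become_user: podman_basic_user

The module passes podman's return code (125 here) and the full debug stderr stream back to the controller, which is why the entire play-kube trace appears in the journal entries below.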
Jan 18 11:30:25 managed-node2 python3.12[24100]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml Jan 18 11:30:25 managed-node2 python3.12[24100]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: [starting container 1cf4dd0f43f7016920e29f1ab08c695a05f6469ead495caa9300d983f7605678: cannot get namespace path unless container 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e is running: container is stopped] Pod: ecc89624fd150903de3d8406f631d4530d0728560e903744f4aceb8883d57323 Container: 1cf4dd0f43f7016920e29f1ab08c695a05f6469ead495caa9300d983f7605678 Jan 18 11:30:25 managed-node2 python3.12[24100]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2025-01-18T11:30:24-05:00" level=info msg="/bin/podman filtering at log level debug" time="2025-01-18T11:30:24-05:00" level=debug msg="Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2025-01-18T11:30:24-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2025-01-18T11:30:24-05:00" level=info msg="Using sqlite as database backend" time="2025-01-18T11:30:24-05:00" level=debug msg="systemd-logind: Unknown object '/'." time="2025-01-18T11:30:24-05:00" level=debug msg="Using graph driver overlay" time="2025-01-18T11:30:24-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" time="2025-01-18T11:30:24-05:00" level=debug msg="Using run root /run/user/3001/containers" time="2025-01-18T11:30:24-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" time="2025-01-18T11:30:24-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" time="2025-01-18T11:30:24-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" time="2025-01-18T11:30:24-05:00" level=debug msg="Using transient store: false" time="2025-01-18T11:30:24-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-01-18T11:30:24-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-01-18T11:30:24-05:00" level=debug msg="Cached value indicated that metacopy is not being used" time="2025-01-18T11:30:24-05:00" level=debug msg="Cached value indicated that native-diff is usable" time="2025-01-18T11:30:24-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" time="2025-01-18T11:30:24-05:00" level=debug msg="Initializing event backend file" time="2025-01-18T11:30:24-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" time="2025-01-18T11:30:24-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2025-01-18T11:30:24-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2025-01-18T11:30:24-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" 
time="2025-01-18T11:30:24-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2025-01-18T11:30:24-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2025-01-18T11:30:24-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2025-01-18T11:30:24-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2025-01-18T11:30:24-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2025-01-18T11:30:24-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" time="2025-01-18T11:30:24-05:00" level=info msg="Setting parallel job count to 7" time="2025-01-18T11:30:24-05:00" level=debug msg="Successfully loaded 1 networks" time="2025-01-18T11:30:24-05:00" level=debug msg="found free device name podman1" time="2025-01-18T11:30:24-05:00" level=debug msg="found free ipv4 network subnet 10.89.0.0/24" time="2025-01-18T11:30:24-05:00" level=debug msg="Looking up image \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2025-01-18T11:30:24-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-18T11:30:24-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." time="2025-01-18T11:30:24-05:00" level=debug msg="reference \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\" does not resolve to an image ID" time="2025-01-18T11:30:24-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." time="2025-01-18T11:30:24-05:00" level=debug msg="reference \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\" does not resolve to an image ID" time="2025-01-18T11:30:24-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." 
time="2025-01-18T11:30:24-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-18T11:30:24-05:00" level=debug msg="FROM \"scratch\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Cached value indicated that idmapped mounts for overlay are not supported" time="2025-01-18T11:30:24-05:00" level=debug msg="Check for idmapped mounts support " time="2025-01-18T11:30:24-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-18T11:30:24-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-18T11:30:24-05:00" level=debug msg="overlay: test mount indicated that volatile is being used" time="2025-01-18T11:30:24-05:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/9d8d459e4ef4fa8a17eed866c3dda4831e19f4d8c40ddcf2b3f408f85aff9830/empty,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/9d8d459e4ef4fa8a17eed866c3dda4831e19f4d8c40ddcf2b3f408f85aff9830/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/9d8d459e4ef4fa8a17eed866c3dda4831e19f4d8c40ddcf2b3f408f85aff9830/work,userxattr,volatile,context=\"system_u:object_r:container_file_t:s0:c225,c921\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Container ID: 2d2b27d2d138efcf3ea7ce9b87b8bd1e0f28d2dca6ad51fdd5cb4bcf6a09639d" time="2025-01-18T11:30:24-05:00" level=debug msg="Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:copy Args:[/usr/libexec/podman/catatonit /catatonit] Flags:[] Attrs:map[] Message:COPY /usr/libexec/podman/catatonit /catatonit Heredocs:[] Original:COPY /usr/libexec/podman/catatonit /catatonit}" time="2025-01-18T11:30:24-05:00" level=debug msg="COPY []string(nil), imagebuilder.Copy{FromFS:false, From:\"\", Src:[]string{\"/usr/libexec/podman/catatonit\"}, Dest:\"/catatonit\", Download:false, Chown:\"\", Chmod:\"\", Checksum:\"\", Files:[]imagebuilder.File(nil), KeepGitDir:false, Link:false, Parents:false, Excludes:[]string(nil)}" time="2025-01-18T11:30:24-05:00" level=debug msg="EnsureContainerPath \"/\" (owner \"\", mode 0) in \"2d2b27d2d138efcf3ea7ce9b87b8bd1e0f28d2dca6ad51fdd5cb4bcf6a09639d\"" time="2025-01-18T11:30:24-05:00" level=debug msg="added content file:872045bc026632654869cebc14f91fe531969760bf26925e889d9b70527c8b67" time="2025-01-18T11:30:24-05:00" level=debug msg="Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:entrypoint Args:[/catatonit -P] Flags:[] Attrs:map[json:true] Message:ENTRYPOINT /catatonit -P Heredocs:[] Original:ENTRYPOINT [\"/catatonit\", \"-P\"]}" time="2025-01-18T11:30:24-05:00" level=debug msg="EnsureContainerPath \"/\" (owner \"\", mode 0) in \"2d2b27d2d138efcf3ea7ce9b87b8bd1e0f28d2dca6ad51fdd5cb4bcf6a09639d\"" time="2025-01-18T11:30:24-05:00" level=debug msg="COMMIT localhost/podman-pause:5.3.1-1733097600" time="2025-01-18T11:30:24-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\"" time="2025-01-18T11:30:24-05:00" level=debug msg="COMMIT \"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\"" time="2025-01-18T11:30:24-05:00" level=debug msg="committing image with reference 
\"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\" is allowed by policy" time="2025-01-18T11:30:24-05:00" level=debug msg="layer list: [\"9d8d459e4ef4fa8a17eed866c3dda4831e19f4d8c40ddcf2b3f408f85aff9830\"]" time="2025-01-18T11:30:24-05:00" level=debug msg="using \"/var/tmp/buildah1992200273\" to hold temporary data" time="2025-01-18T11:30:24-05:00" level=debug msg="Tar with options on /home/podman_basic_user/.local/share/containers/storage/overlay/9d8d459e4ef4fa8a17eed866c3dda4831e19f4d8c40ddcf2b3f408f85aff9830/diff" time="2025-01-18T11:30:24-05:00" level=debug msg="layer \"9d8d459e4ef4fa8a17eed866c3dda4831e19f4d8c40ddcf2b3f408f85aff9830\" size is 699392 bytes, uncompressed digest sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6, possibly-compressed digest sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6" time="2025-01-18T11:30:24-05:00" level=debug msg="OCIv1 config = {\"created\":\"2025-01-18T16:30:24.477854787Z\",\"architecture\":\"amd64\",\"os\":\"linux\",\"config\":{\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Entrypoint\":[\"/catatonit\",\"-P\"],\"WorkingDir\":\"/\",\"Labels\":{\"io.buildah.version\":\"1.38.0\"}},\"rootfs\":{\"type\":\"layers\",\"diff_ids\":[\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"]},\"history\":[{\"created\":\"2025-01-18T16:30:24.450709884Z\",\"created_by\":\"/bin/sh -c #(nop) COPY file:872045bc026632654869cebc14f91fe531969760bf26925e889d9b70527c8b67 in /catatonit \",\"empty_layer\":true},{\"created\":\"2025-01-18T16:30:24.481026717Z\",\"created_by\":\"/bin/sh -c #(nop) ENTRYPOINT [\\\"/catatonit\\\", \\\"-P\\\"]\"}]}" time="2025-01-18T11:30:24-05:00" level=debug msg="OCIv1 manifest = {\"schemaVersion\":2,\"mediaType\":\"application/vnd.oci.image.manifest.v1+json\",\"config\":{\"mediaType\":\"application/vnd.oci.image.config.v1+json\",\"digest\":\"sha256:74da33da3b290d57b5e3cd4f0642c04e3a9aad2dd6eb9733a8edc163fcd3b747\",\"size\":685},\"layers\":[{\"mediaType\":\"application/vnd.oci.image.layer.v1.tar\",\"digest\":\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\",\"size\":699392}],\"annotations\":{\"org.opencontainers.image.base.digest\":\"\",\"org.opencontainers.image.base.name\":\"\"}}" time="2025-01-18T11:30:24-05:00" level=debug msg="Docker v2s2 config = 
{\"created\":\"2025-01-18T16:30:24.477854787Z\",\"container\":\"2d2b27d2d138efcf3ea7ce9b87b8bd1e0f28d2dca6ad51fdd5cb4bcf6a09639d\",\"container_config\":{\"Hostname\":\"\",\"Domainname\":\"\",\"User\":\"\",\"AttachStdin\":false,\"AttachStdout\":false,\"AttachStderr\":false,\"Tty\":false,\"OpenStdin\":false,\"StdinOnce\":false,\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Cmd\":null,\"Image\":\"\",\"Volumes\":{},\"WorkingDir\":\"/\",\"Entrypoint\":[\"/catatonit\",\"-P\"],\"OnBuild\":[],\"Labels\":{\"io.buildah.version\":\"1.38.0\"}},\"config\":{\"Hostname\":\"\",\"Domainname\":\"\",\"User\":\"\",\"AttachStdin\":false,\"AttachStdout\":false,\"AttachStderr\":false,\"Tty\":false,\"OpenStdin\":false,\"StdinOnce\":false,\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Cmd\":null,\"Image\":\"\",\"Volumes\":{},\"WorkingDir\":\"/\",\"Entrypoint\":[\"/catatonit\",\"-P\"],\"OnBuild\":[],\"Labels\":{\"io.buildah.version\":\"1.38.0\"}},\"architecture\":\"amd64\",\"os\":\"linux\",\"rootfs\":{\"type\":\"layers\",\"diff_ids\":[\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"]},\"history\":[{\"created\":\"2025-01-18T16:30:24.450709884Z\",\"created_by\":\"/bin/sh -c #(nop) COPY file:872045bc026632654869cebc14f91fe531969760bf26925e889d9b70527c8b67 in /catatonit \",\"empty_layer\":true},{\"created\":\"2025-01-18T16:30:24.481026717Z\",\"created_by\":\"/bin/sh -c #(nop) ENTRYPOINT [\\\"/catatonit\\\", \\\"-P\\\"]\"}]}" time="2025-01-18T11:30:24-05:00" level=debug msg="Docker v2s2 manifest = {\"schemaVersion\":2,\"mediaType\":\"application/vnd.docker.distribution.manifest.v2+json\",\"config\":{\"mediaType\":\"application/vnd.docker.container.image.v1+json\",\"size\":1348,\"digest\":\"sha256:26d2711eba2df3c4227ee8881a1c0b67f1575d8e7862baece5db34e1fa692beb\"},\"layers\":[{\"mediaType\":\"application/vnd.docker.image.rootfs.diff.tar\",\"size\":699392,\"digest\":\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"}]}" time="2025-01-18T11:30:24-05:00" level=debug msg="Using SQLite blob info cache at /home/podman_basic_user/.local/share/containers/cache/blob-info-cache-v1.sqlite" time="2025-01-18T11:30:24-05:00" level=debug msg="IsRunningImageAllowed for image containers-storage:" time="2025-01-18T11:30:24-05:00" level=debug msg=" Using transport \"containers-storage\" policy section \"\"" time="2025-01-18T11:30:24-05:00" level=debug msg=" Requirement 0: allowed" time="2025-01-18T11:30:24-05:00" level=debug msg="Overall: allowed" time="2025-01-18T11:30:24-05:00" level=debug msg="start reading config" time="2025-01-18T11:30:24-05:00" level=debug msg="finished reading config" time="2025-01-18T11:30:24-05:00" level=debug msg="Manifest has MIME type application/vnd.oci.image.manifest.v1+json, ordered candidate list [application/vnd.oci.image.manifest.v1+json, application/vnd.docker.distribution.manifest.v2+json, application/vnd.docker.distribution.manifest.v1+prettyjws, application/vnd.docker.distribution.manifest.v1+json]" time="2025-01-18T11:30:24-05:00" level=debug msg="... 
will first try using the original manifest unmodified" time="2025-01-18T11:30:24-05:00" level=debug msg="Checking if we can reuse blob sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6: general substitution = true, compression for MIME type \"application/vnd.oci.image.layer.v1.tar\" = true" time="2025-01-18T11:30:24-05:00" level=debug msg="reading layer \"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"" time="2025-01-18T11:30:24-05:00" level=debug msg="No compression detected" time="2025-01-18T11:30:24-05:00" level=debug msg="Using original blob without modification" time="2025-01-18T11:30:24-05:00" level=debug msg="Applying tar in /home/podman_basic_user/.local/share/containers/storage/overlay/83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6/diff" time="2025-01-18T11:30:24-05:00" level=debug msg="finished reading layer \"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"" time="2025-01-18T11:30:24-05:00" level=debug msg="No compression detected" time="2025-01-18T11:30:24-05:00" level=debug msg="Compression change for blob sha256:74da33da3b290d57b5e3cd4f0642c04e3a9aad2dd6eb9733a8edc163fcd3b747 (\"application/vnd.oci.image.config.v1+json\") not supported" time="2025-01-18T11:30:24-05:00" level=debug msg="Using original blob without modification" time="2025-01-18T11:30:24-05:00" level=debug msg="setting image creation date to 2025-01-18 16:30:24.477854787 +0000 UTC" time="2025-01-18T11:30:24-05:00" level=debug msg="created new image ID \"74da33da3b290d57b5e3cd4f0642c04e3a9aad2dd6eb9733a8edc163fcd3b747\" with metadata \"{}\"" time="2025-01-18T11:30:24-05:00" level=debug msg="added name \"localhost/podman-pause:5.3.1-1733097600\" to image \"74da33da3b290d57b5e3cd4f0642c04e3a9aad2dd6eb9733a8edc163fcd3b747\"" time="2025-01-18T11:30:24-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\"" time="2025-01-18T11:30:24-05:00" level=debug msg="printing final image id \"74da33da3b290d57b5e3cd4f0642c04e3a9aad2dd6eb9733a8edc163fcd3b747\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Pod using bridge network mode" time="2025-01-18T11:30:24-05:00" level=debug msg="Created cgroup path user.slice/user-libpod_pod_ecc89624fd150903de3d8406f631d4530d0728560e903744f4aceb8883d57323.slice for parent user.slice and name libpod_pod_ecc89624fd150903de3d8406f631d4530d0728560e903744f4aceb8883d57323" time="2025-01-18T11:30:24-05:00" level=debug msg="Created cgroup user.slice/user-libpod_pod_ecc89624fd150903de3d8406f631d4530d0728560e903744f4aceb8883d57323.slice" time="2025-01-18T11:30:24-05:00" level=debug msg="Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_ecc89624fd150903de3d8406f631d4530d0728560e903744f4aceb8883d57323.slice" time="2025-01-18T11:30:24-05:00" level=debug msg="Looking up image \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2025-01-18T11:30:24-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-18T11:30:24-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." 
time="2025-01-18T11:30:24-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@74da33da3b290d57b5e3cd4f0642c04e3a9aad2dd6eb9733a8edc163fcd3b747\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2025-01-18T11:30:24-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@74da33da3b290d57b5e3cd4f0642c04e3a9aad2dd6eb9733a8edc163fcd3b747)" time="2025-01-18T11:30:24-05:00" level=debug msg="exporting opaque data as blob \"sha256:74da33da3b290d57b5e3cd4f0642c04e3a9aad2dd6eb9733a8edc163fcd3b747\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Inspecting image 74da33da3b290d57b5e3cd4f0642c04e3a9aad2dd6eb9733a8edc163fcd3b747" time="2025-01-18T11:30:24-05:00" level=debug msg="exporting opaque data as blob \"sha256:74da33da3b290d57b5e3cd4f0642c04e3a9aad2dd6eb9733a8edc163fcd3b747\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Inspecting image 74da33da3b290d57b5e3cd4f0642c04e3a9aad2dd6eb9733a8edc163fcd3b747" time="2025-01-18T11:30:24-05:00" level=debug msg="Inspecting image 74da33da3b290d57b5e3cd4f0642c04e3a9aad2dd6eb9733a8edc163fcd3b747" time="2025-01-18T11:30:24-05:00" level=debug msg="using systemd mode: false" time="2025-01-18T11:30:24-05:00" level=debug msg="setting container name ecc89624fd15-infra" time="2025-01-18T11:30:24-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network ffb9a78e5d96102e63d60f7c7ce353d5163cf289f06efb872ce4d70b43a3efc3 bridge podman1 2025-01-18 11:30:24.308994366 -0500 EST [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2025-01-18T11:30:24-05:00" level=debug msg="Successfully loaded 2 networks" time="2025-01-18T11:30:24-05:00" level=debug msg="Allocated lock 1 for container 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e" time="2025-01-18T11:30:24-05:00" level=debug msg="exporting opaque data as blob \"sha256:74da33da3b290d57b5e3cd4f0642c04e3a9aad2dd6eb9733a8edc163fcd3b747\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Created container \"4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Container \"4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e\" has work directory \"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e/userdata\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Container \"4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e\" has run directory \"/run/user/3001/containers/overlay-containers/4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e/userdata\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-18T11:30:24-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-18T11:30:24-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2025-01-18T11:30:24-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-18T11:30:24-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-18T11:30:24-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Pulling image quay.io/libpod/testimage:20210610 (policy: missing)" time="2025-01-18T11:30:24-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-18T11:30:24-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-18T11:30:24-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-01-18T11:30:24-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-18T11:30:24-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-18T11:30:24-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-18T11:30:24-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-18T11:30:24-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2025-01-18T11:30:24-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-18T11:30:24-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-18T11:30:24-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-18T11:30:24-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-18T11:30:24-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-18T11:30:24-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-18T11:30:24-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-01-18T11:30:24-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-18T11:30:24-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-18T11:30:24-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-18T11:30:24-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-18T11:30:24-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-18T11:30:24-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-18T11:30:24-05:00" level=debug msg="using systemd mode: false" time="2025-01-18T11:30:24-05:00" level=debug msg="adding container to pod httpd1" time="2025-01-18T11:30:24-05:00" level=debug msg="setting container name httpd1-httpd1" 
time="2025-01-18T11:30:24-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-01-18T11:30:24-05:00" level=info msg="Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host" time="2025-01-18T11:30:24-05:00" level=debug msg="Adding mount /proc" time="2025-01-18T11:30:24-05:00" level=debug msg="Adding mount /dev" time="2025-01-18T11:30:24-05:00" level=debug msg="Adding mount /dev/pts" time="2025-01-18T11:30:24-05:00" level=debug msg="Adding mount /dev/mqueue" time="2025-01-18T11:30:24-05:00" level=debug msg="Adding mount /sys" time="2025-01-18T11:30:24-05:00" level=debug msg="Adding mount /sys/fs/cgroup" time="2025-01-18T11:30:24-05:00" level=debug msg="Allocated lock 2 for container 1cf4dd0f43f7016920e29f1ab08c695a05f6469ead495caa9300d983f7605678" time="2025-01-18T11:30:24-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Created container \"1cf4dd0f43f7016920e29f1ab08c695a05f6469ead495caa9300d983f7605678\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Container \"1cf4dd0f43f7016920e29f1ab08c695a05f6469ead495caa9300d983f7605678\" has work directory \"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/1cf4dd0f43f7016920e29f1ab08c695a05f6469ead495caa9300d983f7605678/userdata\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Container \"1cf4dd0f43f7016920e29f1ab08c695a05f6469ead495caa9300d983f7605678\" has run directory \"/run/user/3001/containers/overlay-containers/1cf4dd0f43f7016920e29f1ab08c695a05f6469ead495caa9300d983f7605678/userdata\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Strongconnecting node 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e" time="2025-01-18T11:30:24-05:00" level=debug msg="Pushed 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e onto stack" time="2025-01-18T11:30:24-05:00" level=debug msg="Finishing node 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e. Popped 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e off stack" time="2025-01-18T11:30:24-05:00" level=debug msg="Strongconnecting node 1cf4dd0f43f7016920e29f1ab08c695a05f6469ead495caa9300d983f7605678" time="2025-01-18T11:30:24-05:00" level=debug msg="Pushed 1cf4dd0f43f7016920e29f1ab08c695a05f6469ead495caa9300d983f7605678 onto stack" time="2025-01-18T11:30:24-05:00" level=debug msg="Finishing node 1cf4dd0f43f7016920e29f1ab08c695a05f6469ead495caa9300d983f7605678. 
Popped 1cf4dd0f43f7016920e29f1ab08c695a05f6469ead495caa9300d983f7605678 off stack" time="2025-01-18T11:30:24-05:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/JSDQ6XK5RFVFINHXICJFEDNKJ6,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/f38eb63b5255bea370660ade2382dd866b105a6f7d6867fc251c291371ba4a58/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/f38eb63b5255bea370660ade2382dd866b105a6f7d6867fc251c291371ba4a58/work,userxattr,context=\"system_u:object_r:container_file_t:s0:c354,c759\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Made network namespace at /run/user/3001/netns/netns-3db7a625-f37c-48f9-4131-62aa9e219beb for container 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e" time="2025-01-18T11:30:24-05:00" level=debug msg="Mounted container \"4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e\" at \"/home/podman_basic_user/.local/share/containers/storage/overlay/f38eb63b5255bea370660ade2382dd866b105a6f7d6867fc251c291371ba4a58/merged\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Created root filesystem for container 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e at /home/podman_basic_user/.local/share/containers/storage/overlay/f38eb63b5255bea370660ade2382dd866b105a6f7d6867fc251c291371ba4a58/merged" time="2025-01-18T11:30:24-05:00" level=debug msg="Creating rootless network namespace at \"/run/user/3001/containers/networks/rootless-netns/rootless-netns\"" time="2025-01-18T11:30:24-05:00" level=debug msg="pasta arguments: --config-net --pid /run/user/3001/containers/networks/rootless-netns/rootless-netns-conn.pid --dns-forward 169.254.1.1 -t none -u none -T none -U none --no-map-gw --quiet --netns /run/user/3001/containers/networks/rootless-netns/rootless-netns --map-guest-addr 169.254.1.2" time="2025-01-18T11:30:24-05:00" level=debug msg="The path of /etc/resolv.conf in the mount ns is \"/etc/resolv.conf\"" [DEBUG netavark::network::validation] Validating network namespace... [DEBUG netavark::commands::setup] Setting up... 
[INFO netavark::firewall] Using nftables firewall driver [DEBUG netavark::network::bridge] Setup network podman-default-kube-network [DEBUG netavark::network::bridge] Container interface name: eth0 with IP addresses [10.89.0.2/24] [DEBUG netavark::network::bridge] Bridge name: podman1 with IP addresses [10.89.0.1/24] [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.ip_forward to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/podman1/rp_filter to 2 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv6/conf/eth0/autoconf to 0 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/arp_notify to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/rp_filter to 2 [INFO netavark::network::netlink] Adding route (dest: 0.0.0.0/0 ,gw: 10.89.0.1, metric 100) [INFO netavark::firewall::nft] Creating container chain nv_ffb9a78e_10_89_0_0_nm24 [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.conf.podman1.route_localnet to 1 [DEBUG netavark::dns::aardvark] Spawning aardvark server [DEBUG netavark::dns::aardvark] start aardvark-dns: ["systemd-run", "-q", "--scope", "--user", "/usr/libexec/podman/aardvark-dns", "--config", "/run/user/3001/containers/networks/aardvark-dns", "-p", "53", "run"] [DEBUG netavark::commands::setup] { "podman-default-kube-network": StatusBlock { dns_search_domains: Some( [ "dns.podman", ], ), dns_server_ips: Some( [ 10.89.0.1, ], ), interfaces: Some( { "eth0": NetInterface { mac_address: "4a:66:31:f9:f1:6c", subnets: Some( [ NetAddress { gateway: Some( 10.89.0.1, ), ipnet: 10.89.0.2/24, }, ], ), }, }, ), }, } [DEBUG netavark::commands::setup] Setup complete time="2025-01-18T11:30:24-05:00" level=debug msg="rootlessport: time=\"2025-01-18T11:30:24-05:00\" level=info msg=\"Starting parent driver\"\n" time="2025-01-18T11:30:24-05:00" level=debug msg="rootlessport: time=\"2025-01-18T11:30:24-05:00\" level=info msg=\"opaque=map[builtin.readypipepath:/run/user/3001/libpod/tmp/rootlessport3056496677/.bp-ready.pipe builtin.socketpath:/run/user/3001/libpod/tmp/rootlessport3056496677/.bp.sock]\"\ntime=\"2025-01-18T11:30:24-05:00\" level=info msg=\"Starting child driver in child netns (\\\"/proc/self/exe\\\" [rootlessport-child])\"\n" time="2025-01-18T11:30:24-05:00" level=debug msg="rootlessport: time=\"2025-01-18T11:30:24-05:00\" level=info msg=\"Waiting for initComplete\"\n" time="2025-01-18T11:30:24-05:00" level=debug msg="rootlessport: time=\"2025-01-18T11:30:24-05:00\" level=info msg=\"initComplete is closed; parent and child established the communication channel\"\n" time="2025-01-18T11:30:24-05:00" level=debug msg="rootlessport: time=\"2025-01-18T11:30:24-05:00\" level=info msg=\"Exposing ports [{ 80 15001 1 tcp}]\"\n" time="2025-01-18T11:30:24-05:00" level=debug msg="rootlessport: time=\"2025-01-18T11:30:24-05:00\" level=info msg=Ready\n" time="2025-01-18T11:30:24-05:00" level=debug msg="rootlessport is ready" time="2025-01-18T11:30:24-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2025-01-18T11:30:24-05:00" level=debug msg="Setting Cgroups for container 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e to user-libpod_pod_ecc89624fd150903de3d8406f631d4530d0728560e903744f4aceb8883d57323.slice:libpod:4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e" time="2025-01-18T11:30:24-05:00" level=debug 
msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-01-18T11:30:24-05:00" level=debug msg="Workdir \"/\" resolved to host path \"/home/podman_basic_user/.local/share/containers/storage/overlay/f38eb63b5255bea370660ade2382dd866b105a6f7d6867fc251c291371ba4a58/merged\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Created OCI spec for container 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e/userdata/config.json" time="2025-01-18T11:30:24-05:00" level=debug msg="Created cgroup path user.slice/user-libpod_pod_ecc89624fd150903de3d8406f631d4530d0728560e903744f4aceb8883d57323.slice for parent user.slice and name libpod_pod_ecc89624fd150903de3d8406f631d4530d0728560e903744f4aceb8883d57323" time="2025-01-18T11:30:24-05:00" level=debug msg="Created cgroup user.slice/user-libpod_pod_ecc89624fd150903de3d8406f631d4530d0728560e903744f4aceb8883d57323.slice" time="2025-01-18T11:30:24-05:00" level=debug msg="Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_ecc89624fd150903de3d8406f631d4530d0728560e903744f4aceb8883d57323.slice" time="2025-01-18T11:30:24-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2025-01-18T11:30:24-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e -u 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e -r /usr/bin/crun -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e/userdata -p /run/user/3001/containers/overlay-containers/4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e/userdata/pidfile -n ecc89624fd15-infra --exit-dir /run/user/3001/libpod/tmp/exits --persist-dir /run/user/3001/libpod/tmp/persist/4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e --full-attach -s -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e]" 
time="2025-01-18T11:30:24-05:00" level=info msg="Running conmon under slice user-libpod_pod_ecc89624fd150903de3d8406f631d4530d0728560e903744f4aceb8883d57323.slice and unitName libpod-conmon-4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e.scope" [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied time="2025-01-18T11:30:24-05:00" level=debug msg="Received: 24210" time="2025-01-18T11:30:24-05:00" level=info msg="Got Conmon PID as 24208" time="2025-01-18T11:30:24-05:00" level=debug msg="Created container 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e in OCI runtime" time="2025-01-18T11:30:24-05:00" level=debug msg="Adding nameserver(s) from network status of '[\"10.89.0.1\"]'" time="2025-01-18T11:30:24-05:00" level=debug msg="Adding search domain(s) from network status of '[\"dns.podman\"]'" time="2025-01-18T11:30:24-05:00" level=debug msg="Starting container 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e with command [/catatonit -P]" time="2025-01-18T11:30:24-05:00" level=debug msg="Started container 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e" time="2025-01-18T11:30:24-05:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/ACFCPDR6S36YG2XL23UOZJXHEP,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/1023185ec6d6a282b985a27cae520d0884d7318770b37051a554f91cc10afa0c/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/1023185ec6d6a282b985a27cae520d0884d7318770b37051a554f91cc10afa0c/work,userxattr,context=\"system_u:object_r:container_file_t:s0:c354,c759\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Mounted container \"1cf4dd0f43f7016920e29f1ab08c695a05f6469ead495caa9300d983f7605678\" at \"/home/podman_basic_user/.local/share/containers/storage/overlay/1023185ec6d6a282b985a27cae520d0884d7318770b37051a554f91cc10afa0c/merged\"" time="2025-01-18T11:30:24-05:00" level=debug msg="Created root filesystem for container 1cf4dd0f43f7016920e29f1ab08c695a05f6469ead495caa9300d983f7605678 at /home/podman_basic_user/.local/share/containers/storage/overlay/1023185ec6d6a282b985a27cae520d0884d7318770b37051a554f91cc10afa0c/merged" time="2025-01-18T11:30:24-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2025-01-18T11:30:24-05:00" level=debug msg="Cleaning up container 1cf4dd0f43f7016920e29f1ab08c695a05f6469ead495caa9300d983f7605678" time="2025-01-18T11:30:24-05:00" level=debug msg="Unmounted container \"1cf4dd0f43f7016920e29f1ab08c695a05f6469ead495caa9300d983f7605678\"" starting container 1cf4dd0f43f7016920e29f1ab08c695a05f6469ead495caa9300d983f7605678: cannot get namespace path unless container 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e is running: container is stopped Error: failed to start 1 containers time="2025-01-18T11:30:24-05:00" level=debug msg="Shutting down engines" time="2025-01-18T11:30:24-05:00" level=info msg="Received shutdown.Stop(), terminating!" 
PID=24108 time="2025-01-18T11:30:24-05:00" level=debug msg="Adding parallel job to stop container 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e" time="2025-01-18T11:30:24-05:00" level=debug msg="Adding parallel job to stop container 1cf4dd0f43f7016920e29f1ab08c695a05f6469ead495caa9300d983f7605678" time="2025-01-18T11:30:24-05:00" level=debug msg="Stopping ctr 1cf4dd0f43f7016920e29f1ab08c695a05f6469ead495caa9300d983f7605678 (timeout 10)" time="2025-01-18T11:30:25-05:00" level=debug msg="Stopping ctr 4e247833c025794e6d042124fd3dbddde74041cfb67841895953d1520867e77e (timeout 10)" time="2025-01-18T11:30:25-05:00" level=debug msg="Removing pod cgroup user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_ecc89624fd150903de3d8406f631d4530d0728560e903744f4aceb8883d57323.slice" Jan 18 11:30:25 managed-node2 python3.12[24100]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 125 Jan 18 11:30:25 managed-node2 sudo[24097]: pam_unix(sudo:session): session closed for user podman_basic_user Jan 18 11:30:25 managed-node2 python3.12[24360]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:30:26 managed-node2 python3.12[24492]: ansible-file Invoked with path=/etc/containers/storage.conf state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:30:26 managed-node2 python3.12[24623]: ansible-file Invoked with path=/tmp/lsr_8do_wnm9_podman state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:30:28 managed-node2 python3.12[24797]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Jan 18 11:30:29 managed-node2 python3.12[24957]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:30:30 managed-node2 python3.12[25088]: ansible-ansible.legacy.dnf Invoked with name=['python3-pyasn1', 'python3-cryptography', 'python3-dbus'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 18 11:30:32 managed-node2 python3.12[25231]: ansible-ansible.legacy.dnf Invoked with name=['certmonger', 'python3-packaging'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] 
Jan 18 11:30:32 managed-node2 python3.12[25231]: ansible-ansible.legacy.dnf Invoked with name=['certmonger', 'python3-packaging'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Jan 18 11:30:33 managed-node2 dbus-broker-launch[632]: Noticed file-system modification, trigger reload.
░░ Subject: A configuration directory was written to
░░ Defined-By: dbus-broker
░░ Support: https://groups.google.com/forum/#!forum/bus1-devel
░░
░░ A write was detected to one of the directories containing D-Bus configuration
░░ files, triggering a configuration reload.
░░
░░ This functionality exists for backwards compatibility to pick up changes to
░░ D-Bus configuration without an explicit reolad request. Typically when
░░ installing or removing third-party software causes D-Bus configuration files
░░ to be added or removed.
░░
░░ It is worth noting that this may cause partial configuration to be loaded in
░░ case dispatching this notification races with the writing of the configuration
░░ files. However, a future notification will then cause the configuration to be
░░ reladed again.
[the same reload notice repeats four more times at 11:30:33 from dbus-broker-launch[632] and dbus-broker-launch[23391]]
Jan 18 11:30:33 managed-node2 dbus-broker-launch[23391]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored
Jan 18 11:30:33 managed-node2 dbus-broker-launch[23391]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored
[both eavesdropping warnings repeat once more at 11:30:33]
Jan 18 11:30:33 managed-node2 systemd[1]: Reload requested from client PID 25250 ('systemctl') (unit session-5.scope)...
Jan 18 11:30:33 managed-node2 systemd[1]: Reloading...
Jan 18 11:30:34 managed-node2 systemd-rc-local-generator[25296]: /etc/rc.d/rc.local is not marked executable, skipping.
Jan 18 11:30:34 managed-node2 systemd-ssh-generator[25298]: Failed to query local AF_VSOCK CID: Permission denied
Jan 18 11:30:34 managed-node2 (sd-exec-[25271]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1.
Jan 18 11:30:34 managed-node2 systemd[1]: Reloading finished in 209 ms.
Jan 18 11:30:34 managed-node2 systemd[1]: Started run-p25310-i25610.service - [systemd-run] /usr/bin/systemctl start man-db-cache-update.
░░ Subject: A start job for unit run-p25310-i25610.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit run-p25310-i25610.service has finished successfully.
░░
░░ The job identifier is 2202.
Jan 18 11:30:34 managed-node2 systemd[1]: Starting man-db-cache-update.service...
░░ Subject: A start job for unit man-db-cache-update.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit man-db-cache-update.service has begun execution.
░░
░░ The job identifier is 2280.
Jan 18 11:30:34 managed-node2 systemd[1]: Reload requested from client PID 25314 ('systemctl') (unit session-5.scope)...
Jan 18 11:30:34 managed-node2 systemd[1]: Reloading...
Jan 18 11:30:34 managed-node2 systemd-rc-local-generator[25366]: /etc/rc.d/rc.local is not marked executable, skipping.
Jan 18 11:30:34 managed-node2 systemd-ssh-generator[25368]: Failed to query local AF_VSOCK CID: Permission denied
Jan 18 11:30:34 managed-node2 (sd-exec-[25339]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1.
Jan 18 11:30:34 managed-node2 systemd[1]: Reloading finished in 309 ms.
Jan 18 11:30:34 managed-node2 systemd[1]: Queuing reload/restart jobs for marked units…
Jan 18 11:30:35 managed-node2 systemd[1]: man-db-cache-update.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit man-db-cache-update.service has successfully entered the 'dead' state.
Jan 18 11:30:35 managed-node2 systemd[1]: Finished man-db-cache-update.service.
░░ Subject: A start job for unit man-db-cache-update.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit man-db-cache-update.service has finished successfully.
░░
░░ The job identifier is 2280.
Jan 18 11:30:35 managed-node2 systemd[1]: run-p25310-i25610.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit run-p25310-i25610.service has successfully entered the 'dead' state.
Jan 18 11:30:35 managed-node2 python3.12[25510]: ansible-file Invoked with name=/etc/certmonger//pre-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//pre-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:30:36 managed-node2 python3.12[25641]: ansible-file Invoked with name=/etc/certmonger//post-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//post-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:30:36 managed-node2 python3.12[25772]: ansible-ansible.legacy.systemd Invoked with name=certmonger state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Jan 18 11:30:36 managed-node2 systemd[1]: Reload requested from client PID 25775 ('systemctl') (unit session-5.scope)...
Jan 18 11:30:36 managed-node2 systemd[1]: Reloading...
Jan 18 11:30:36 managed-node2 systemd-ssh-generator[25823]: Failed to query local AF_VSOCK CID: Permission denied
Jan 18 11:30:36 managed-node2 systemd-rc-local-generator[25821]: /etc/rc.d/rc.local is not marked executable, skipping.
Jan 18 11:30:36 managed-node2 (sd-exec-[25796]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1.
Jan 18 11:30:37 managed-node2 systemd[1]: Reloading finished in 195 ms.
Jan 18 11:30:37 managed-node2 systemd[1]: Starting certmonger.service - Certificate monitoring and PKI enrollment...
░░ Subject: A start job for unit certmonger.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit certmonger.service has begun execution.
░░
░░ The job identifier is 2358.
Jan 18 11:30:37 managed-node2 (rtmonger)[25833]: certmonger.service: Referenced but unset environment variable evaluates to an empty string: OPTS
Jan 18 11:30:37 managed-node2 systemd[1]: Started certmonger.service - Certificate monitoring and PKI enrollment.
░░ Subject: A start job for unit certmonger.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit certmonger.service has finished successfully.
░░
░░ The job identifier is 2358.
Jan 18 11:30:37 managed-node2 python3.12[25991]: ansible-fedora.linux_system_roles.certificate_request Invoked with name=quadlet_demo dns=['localhost'] directory=/etc/pki/tls wait=True ca=self-sign __header=# # Ansible managed # # system_role:certificate provider_config_directory=/etc/certmonger provider=certmonger key_usage=['digitalSignature', 'keyEncipherment'] extended_key_usage=['id-kp-serverAuth', 'id-kp-clientAuth'] auto_renew=True ip=None email=None common_name=None country=None state=None locality=None organization=None organizational_unit=None contact_email=None key_size=None owner=None group=None mode=None principal=None run_before=None run_after=None
Jan 18 11:30:37 managed-node2 certmonger[25833]: 2025-01-18 11:30:37 [25833] Wrote to /var/lib/certmonger/requests/20250118163037
[the same "Wrote to /var/lib/certmonger/requests/20250118163037" record repeats roughly 28 more times between 11:30:37 and 11:30:38 while certmonger processes the request]
Jan 18 11:30:38 managed-node2 certmonger[26006]: Certificate in file "/etc/pki/tls/certs/quadlet_demo.crt" issued by CA and saved.
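At this point certmonger is tracking the self-signed quadlet_demo certificate. A quick way to verify the tracking entry by hand (generic sketch; the stop-tracking command is exactly what the test itself runs a few records below):

    getcert list | grep -B 2 -A 8 quadlet_demo                     # show the tracking request and its status
    getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt   # stop renewal tracking for this certificate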
Jan 18 11:30:38 managed-node2 certmonger[25833]: 2025-01-18 11:30:38 [25833] Wrote to /var/lib/certmonger/requests/20250118163037
Jan 18 11:30:38 managed-node2 python3.12[26137]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt
Jan 18 11:30:38 managed-node2 python3.12[26268]: ansible-slurp Invoked with path=/etc/pki/tls/private/quadlet_demo.key src=/etc/pki/tls/private/quadlet_demo.key
Jan 18 11:30:39 managed-node2 python3.12[26399]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt
Jan 18 11:30:39 managed-node2 python3.12[26530]: ansible-ansible.legacy.command Invoked with _raw_params=getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:30:39 managed-node2 certmonger[25833]: 2025-01-18 11:30:39 [25833] Wrote to /var/lib/certmonger/requests/20250118163037
Jan 18 11:30:40 managed-node2 python3.12[26662]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:30:40 managed-node2 python3.12[26793]: ansible-file Invoked with path=/etc/pki/tls/private/quadlet_demo.key state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:30:40 managed-node2 python3.12[26924]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:30:41 managed-node2 python3.12[27055]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 18 11:30:41 managed-node2 python3.12[27186]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 18 11:30:43 managed-node2 python3.12[27448]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:30:44 managed-node2 python3.12[27585]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None
Jan 18 11:30:44 managed-node2 python3.12[27717]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 18 11:30:46 managed-node2 python3.12[27850]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 18 11:30:46 managed-node2 python3.12[27981]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 18 11:30:47 managed-node2 python3.12[28112]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Jan 18 11:30:48 managed-node2 python3.12[28244]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None
Jan 18 11:30:48 managed-node2 python3.12[28377]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Jan 18 11:30:49 managed-node2 python3.12[28510]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None
Jan 18 11:30:50 managed-node2 python3.12[28641]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None
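The two firewall_lib invocations above open 8000/tcp and 9000/tcp both permanently and in the runtime configuration. The plain firewall-cmd equivalent of what the role does here (sketch, not taken from the test run):

    firewall-cmd --permanent --add-port=8000/tcp   # persists across reloads and reboots
    firewall-cmd --add-port=8000/tcp               # applies to the running firewall now
    firewall-cmd --permanent --add-port=9000/tcp
    firewall-cmd --add-port=9000/tcp
    firewall-cmd --list-ports                      # verify both ports are open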
Jan 18 11:30:52 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
[the same var-lib-containers-storage-overlay.mount deactivation notice repeats seven more times between 11:30:52 and 11:30:56]
Jan 18 11:30:56 managed-node2 python3.12[29252]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 18 11:30:58 managed-node2 python3.12[29385]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:30:58 managed-node2 python3.12[29516]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.network follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jan 18 11:30:59 managed-node2 python3.12[29621]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1737217858.56248-14288-74493159750705/.source.network dest=/etc/containers/systemd/quadlet-demo.network owner=root group=0 mode=0644 _original_basename=quadlet-demo.network follow=False checksum=e57c08d49aff4bae8daab138d913aeddaa8682a0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:30:59 managed-node2 python3.12[29752]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Jan 18 11:30:59 managed-node2 systemd[1]: Reload requested from client PID 29753 ('systemctl') (unit session-5.scope)...
Jan 18 11:30:59 managed-node2 systemd[1]: Reloading...
Jan 18 11:30:59 managed-node2 systemd-ssh-generator[29803]: Failed to query local AF_VSOCK CID: Permission denied
Jan 18 11:30:59 managed-node2 systemd-rc-local-generator[29801]: /etc/rc.d/rc.local is not marked executable, skipping.
Jan 18 11:30:59 managed-node2 (sd-exec-[29775]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1.
Jan 18 11:31:00 managed-node2 systemd[1]: Reloading finished in 196 ms.
Jan 18 11:31:00 managed-node2 python3.12[29940]: ansible-systemd Invoked with name=quadlet-demo-network.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None
Jan 18 11:31:00 managed-node2 systemd[1]: Starting quadlet-demo-network.service...
░░ Subject: A start job for unit quadlet-demo-network.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit quadlet-demo-network.service has begun execution.
░░
░░ The job identifier is 2437.
Jan 18 11:31:00 managed-node2 quadlet-demo-network[29944]: systemd-quadlet-demo
Jan 18 11:31:00 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jan 18 11:31:00 managed-node2 systemd[1]: Finished quadlet-demo-network.service.
░░ Subject: A start job for unit quadlet-demo-network.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit quadlet-demo-network.service has finished successfully.
░░
░░ The job identifier is 2437.
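quadlet-demo-network.service is the unit Quadlet generates from /etc/containers/systemd/quadlet-demo.network (installed a few records above), and its output shows it created the podman network "systemd-quadlet-demo". The file's actual content is not in this log; a minimal .network file producing that name would look like the sketch below, since with no NetworkName= key Quadlet defaults to systemd-<file basename>:

    cat /etc/containers/systemd/quadlet-demo.network
    # [Network]
    # NetworkName=systemd-quadlet-demo   # optional here; the default already yields this name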
Jan 18 11:31:01 managed-node2 python3.12[30084]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 18 11:31:03 managed-node2 python3.12[30217]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:31:03 managed-node2 python3.12[30348]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.volume follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jan 18 11:31:03 managed-node2 python3.12[30453]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1737217863.216999-14492-11702154848732/.source.volume dest=/etc/containers/systemd/quadlet-demo-mysql.volume owner=root group=0 mode=0644 _original_basename=quadlet-demo-mysql.volume follow=False checksum=585f8cbdf0ec73000f9227dcffbef71e9552ea4a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:31:04 managed-node2 python3.12[30584]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Jan 18 11:31:04 managed-node2 systemd[1]: Reload requested from client PID 30585 ('systemctl') (unit session-5.scope)...
Jan 18 11:31:04 managed-node2 systemd[1]: Reloading...
Jan 18 11:31:04 managed-node2 systemd-ssh-generator[30634]: Failed to query local AF_VSOCK CID: Permission denied
Jan 18 11:31:04 managed-node2 systemd-rc-local-generator[30632]: /etc/rc.d/rc.local is not marked executable, skipping.
Jan 18 11:31:04 managed-node2 (sd-exec-[30607]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1.
Jan 18 11:31:04 managed-node2 systemd[1]: Reloading finished in 204 ms.
Jan 18 11:31:05 managed-node2 python3.12[30772]: ansible-systemd Invoked with name=quadlet-demo-mysql-volume.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None
Jan 18 11:31:05 managed-node2 systemd[1]: Starting quadlet-demo-mysql-volume.service...
░░ Subject: A start job for unit quadlet-demo-mysql-volume.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit quadlet-demo-mysql-volume.service has begun execution.
░░
░░ The job identifier is 2521.
Jan 18 11:31:05 managed-node2 podman[30776]: 2025-01-18 11:31:05.315874535 -0500 EST m=+0.024652443 volume create systemd-quadlet-demo-mysql
Jan 18 11:31:05 managed-node2 quadlet-demo-mysql-volume[30776]: systemd-quadlet-demo-mysql
Jan 18 11:31:05 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jan 18 11:31:05 managed-node2 systemd[1]: Finished quadlet-demo-mysql-volume.service.
░░ Subject: A start job for unit quadlet-demo-mysql-volume.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit quadlet-demo-mysql-volume.service has finished successfully.
░░
░░ The job identifier is 2521.
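Likewise, quadlet-demo-mysql-volume.service is generated from quadlet-demo-mysql.volume and created the volume "systemd-quadlet-demo-mysql". A minimal .volume file yielding that name (sketch; Quadlet's default volume name is systemd-<file basename>, so an empty section suffices):

    cat /etc/containers/systemd/quadlet-demo-mysql.volume
    # [Volume]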
Jan 18 11:31:06 managed-node2 python3.12[30914]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 18 11:31:07 managed-node2 python3.12[31047]: ansible-file Invoked with path=/tmp/quadlet_demo state=directory owner=root group=root mode=0777 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:31:08 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jan 18 11:31:17 managed-node2 podman[31187]: 2025-01-18 11:31:17.730931599 -0500 EST m=+9.639813451 image pull dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6
Jan 18 11:31:17 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
[the same deactivation notice repeats once more at 11:31:17]
Jan 18 11:31:18 managed-node2 python3.12[31495]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:31:18 managed-node2 python3.12[31626]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jan 18 11:31:19 managed-node2 python3.12[31731]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/systemd/quadlet-demo-mysql.container owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1737217878.4555283-14896-65681884352884/.source.container _original_basename=.g6l7hkjm follow=False checksum=ca62b2ad3cc9afb5b5371ebbf797b9bc4fd7edd4 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:31:19 managed-node2 python3.12[31862]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Jan 18 11:31:19 managed-node2 systemd[1]: Reload requested from client PID 31863 ('systemctl') (unit session-5.scope)...
Jan 18 11:31:19 managed-node2 systemd[1]: Reloading...
Jan 18 11:31:19 managed-node2 systemd-ssh-generator[31906]: Failed to query local AF_VSOCK CID: Permission denied
Jan 18 11:31:19 managed-node2 systemd-rc-local-generator[31904]: /etc/rc.d/rc.local is not marked executable, skipping.
Jan 18 11:31:19 managed-node2 (sd-exec-[31885]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1.
Jan 18 11:31:19 managed-node2 systemd[1]: Reloading finished in 208 ms.
Jan 18 11:31:20 managed-node2 python3.12[32050]: ansible-systemd Invoked with name=quadlet-demo-mysql.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None
Jan 18 11:31:20 managed-node2 systemd[1]: Starting quadlet-demo-mysql.service...
░░ Subject: A start job for unit quadlet-demo-mysql.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit quadlet-demo-mysql.service has begun execution.
░░
░░ The job identifier is 2605.
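The unit being started here is generated from quadlet-demo-mysql.container (checksum ca62b2ad... above). Only the image is confirmed by this log (the pull of quay.io/linux-system-roles/mysql:5.6); every other key in the sketch below is an assumption inferred from the network, volume, and health-check timer seen elsewhere in this run:

    cat /etc/containers/systemd/quadlet-demo-mysql.container
    # [Container]
    # Image=quay.io/linux-system-roles/mysql:5.6        # confirmed by the image pull records
    # ContainerName=quadlet-demo-mysql                  # matches the container name in the log
    # Network=quadlet-demo.network                      # assumption: joins the .network unit above
    # Volume=quadlet-demo-mysql.volume:/var/lib/mysql   # assumption: uses the .volume unit above
    # HealthCmd=...                                     # a health check is implied by the *-21cb1526229c8aa.timer below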
Jan 18 11:31:20 managed-node2 podman[32054]: 2025-01-18 11:31:20.496650499 -0500 EST m=+0.049779030 container create c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service)
Jan 18 11:31:20 managed-node2 kernel: podman2: port 1(veth0) entered blocking state
Jan 18 11:31:20 managed-node2 kernel: podman2: port 1(veth0) entered disabled state
Jan 18 11:31:20 managed-node2 kernel: veth0: entered allmulticast mode
Jan 18 11:31:20 managed-node2 kernel: veth0: entered promiscuous mode
Jan 18 11:31:20 managed-node2 kernel: podman2: port 1(veth0) entered blocking state
Jan 18 11:31:20 managed-node2 kernel: podman2: port 1(veth0) entered forwarding state
Jan 18 11:31:20 managed-node2 NetworkManager[709]: [1737217880.5264] manager: (podman2): new Bridge device (/org/freedesktop/NetworkManager/Devices/3)
Jan 18 11:31:20 managed-node2 NetworkManager[709]: [1737217880.5289] device (veth0): carrier: link connected
Jan 18 11:31:20 managed-node2 NetworkManager[709]: [1737217880.5292] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/4)
Jan 18 11:31:20 managed-node2 NetworkManager[709]: [1737217880.5296] device (podman2): carrier: link connected
Jan 18 11:31:20 managed-node2 (udev-worker)[32069]: Network interface NamePolicy= disabled on kernel command line.
Jan 18 11:31:20 managed-node2 (udev-worker)[32067]: Network interface NamePolicy= disabled on kernel command line.
Jan 18 11:31:20 managed-node2 NetworkManager[709]: [1737217880.5505] device (podman2): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Jan 18 11:31:20 managed-node2 NetworkManager[709]: [1737217880.5529] device (podman2): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external')
Jan 18 11:31:20 managed-node2 NetworkManager[709]: [1737217880.5548] device (podman2): Activation: starting connection 'podman2' (1074efbb-9529-43ba-921b-6e9ef70edf68)
Jan 18 11:31:20 managed-node2 NetworkManager[709]: [1737217880.5557] device (podman2): state change: disconnected -> prepare (reason 'none', managed-type: 'external')
Jan 18 11:31:20 managed-node2 NetworkManager[709]: [1737217880.5565] device (podman2): state change: prepare -> config (reason 'none', managed-type: 'external')
Jan 18 11:31:20 managed-node2 NetworkManager[709]: [1737217880.5573] device (podman2): state change: config -> ip-config (reason 'none', managed-type: 'external')
Jan 18 11:31:20 managed-node2 NetworkManager[709]: [1737217880.5582] device (podman2): state change: ip-config -> ip-check (reason 'none', managed-type: 'external')
Jan 18 11:31:20 managed-node2 podman[32054]: 2025-01-18 11:31:20.477039879 -0500 EST m=+0.030168563 image pull dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6
Jan 18 11:31:20 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service...
░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit NetworkManager-dispatcher.service has begun execution.
░░
░░ The job identifier is 2691.
Jan 18 11:31:20 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service.
░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit NetworkManager-dispatcher.service has finished successfully.
░░
░░ The job identifier is 2691.
Jan 18 11:31:20 managed-node2 NetworkManager[709]: [1737217880.5970] device (podman2): state change: ip-check -> secondaries (reason 'none', managed-type: 'external')
Jan 18 11:31:20 managed-node2 NetworkManager[709]: [1737217880.5972] device (podman2): state change: secondaries -> activated (reason 'none', managed-type: 'external')
Jan 18 11:31:20 managed-node2 NetworkManager[709]: [1737217880.5977] device (podman2): Activation: successful, device activated.
Jan 18 11:31:20 managed-node2 systemd[1]: Started run-p32102-i32402.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run.
░░ Subject: A start job for unit run-p32102-i32402.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit run-p32102-i32402.scope has finished successfully.
░░
░░ The job identifier is 2770.
Jan 18 11:31:20 managed-node2 systemd[1]: Started c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac-21cb1526229c8aa.timer - [systemd-run] /usr/bin/podman healthcheck run c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac.
░░ Subject: A start job for unit c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac-21cb1526229c8aa.timer has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac-21cb1526229c8aa.timer has finished successfully.
░░
░░ The job identifier is 2776.
Jan 18 11:31:20 managed-node2 podman[32054]: 2025-01-18 11:31:20.714063203 -0500 EST m=+0.267191817 container init c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service)
Jan 18 11:31:20 managed-node2 systemd[1]: Started quadlet-demo-mysql.service.
░░ Subject: A start job for unit quadlet-demo-mysql.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit quadlet-demo-mysql.service has finished successfully.
░░
░░ The job identifier is 2605.
Jan 18 11:31:20 managed-node2 podman[32054]: 2025-01-18 11:31:20.744374337 -0500 EST m=+0.297503364 container start c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service)
Jan 18 11:31:20 managed-node2 quadlet-demo-mysql[32054]: c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac
Jan 18 11:31:20 managed-node2 podman[32114]: 2025-01-18 11:31:20.798229509 -0500 EST m=+0.058335837 container died c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service)
Jan 18 11:31:20 managed-node2 systemd[1]: c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac-21cb1526229c8aa.timer: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac-21cb1526229c8aa.timer has successfully entered the 'dead' state.
Jan 18 11:31:20 managed-node2 systemd[1]: Stopped c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac-21cb1526229c8aa.timer - [systemd-run] /usr/bin/podman healthcheck run c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac.
░░ Subject: A stop job for unit c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac-21cb1526229c8aa.timer has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac-21cb1526229c8aa.timer has finished.
░░
░░ The job identifier is 2932 and the job result is done.
Jan 18 11:31:20 managed-node2 systemd[1]: c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac-21cb1526229c8aa.service: Main process exited, code=exited, status=1/FAILURE
░░ Subject: Unit process exited
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ An ExecStart= process belonging to unit c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac-21cb1526229c8aa.service has exited.
░░
░░ The process' exit code is 'exited' and its exit status is 1.
Jan 18 11:31:20 managed-node2 systemd[1]: c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac-21cb1526229c8aa.service: Failed with result 'exit-code'.
░░ Subject: Unit failed
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac-21cb1526229c8aa.service has entered the 'failed' state with result 'exit-code'.
Jan 18 11:31:20 managed-node2 kernel: podman2: port 1(veth0) entered disabled state
Jan 18 11:31:20 managed-node2 systemd[1]: run-p32102-i32402.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit run-p32102-i32402.scope has successfully entered the 'dead' state.
Jan 18 11:31:20 managed-node2 kernel: veth0 (unregistering): left allmulticast mode
Jan 18 11:31:20 managed-node2 kernel: veth0 (unregistering): left promiscuous mode
Jan 18 11:31:20 managed-node2 kernel: podman2: port 1(veth0) entered disabled state
Jan 18 11:31:20 managed-node2 NetworkManager[709]: [1737217880.8571] device (podman2): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Jan 18 11:31:20 managed-node2 systemd[1]: run-netns-netns\x2dd7ac6bec\x2d1d2f\x2d2dc3\x2d6ae5\x2d3ad68d6e13c0.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit run-netns-netns\x2dd7ac6bec\x2d1d2f\x2d2dc3\x2d6ae5\x2d3ad68d6e13c0.mount has successfully entered the 'dead' state.
Jan 18 11:31:20 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac-userdata-shm.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay\x2dcontainers-c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac-userdata-shm.mount has successfully entered the 'dead' state.
Jan 18 11:31:20 managed-node2 podman[32114]: 2025-01-18 11:31:20.934960057 -0500 EST m=+0.195066423 container remove c23aaf4d5b83ecf6ba09fda87a6c151245d8f4a3a38d5967af4503e9be96f8ac (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service)
Jan 18 11:31:20 managed-node2 systemd[1]: quadlet-demo-mysql.service: Main process exited, code=exited, status=1/FAILURE
░░ Subject: Unit process exited
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ An ExecStart= process belonging to unit quadlet-demo-mysql.service has exited.
░░
░░ The process' exit code is 'exited' and its exit status is 1.
Jan 18 11:31:20 managed-node2 systemd[1]: quadlet-demo-mysql.service: Failed with result 'exit-code'.
░░ Subject: Unit failed
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit quadlet-demo-mysql.service has entered the 'failed' state with result 'exit-code'.
Jan 18 11:31:21 managed-node2 systemd[1]: var-lib-containers-storage-overlay-f44b8f9f5782cefbf6ffe68a333b8178d190e6631786d7d41cf9b947cad743d5-merged.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay-f44b8f9f5782cefbf6ffe68a333b8178d190e6631786d7d41cf9b947cad743d5-merged.mount has successfully entered the 'dead' state.
Jan 18 11:31:21 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
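So quadlet-demo-mysql.service failed: the container died about 50 ms after starting (the "container died" record above), podman removed it, and systemd recorded status=1/FAILURE. Generic triage steps for a failed Quadlet-generated service (sketch; the quadlet binary path is the usual Fedora/CentOS location and may differ elsewhere):

    systemctl status quadlet-demo-mysql.service
    journalctl -u quadlet-demo-mysql.service -e   # service records plus container stdout/stderr
    /usr/libexec/podman/quadlet -dryrun           # re-generate units from /etc/containers/systemd and surface unit-file errors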
Jan 18 11:31:21 managed-node2 python3.12[32286]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 18 11:31:22 managed-node2 python3.12[32419]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:31:23 managed-node2 python3.12[32550]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/envoy-proxy-configmap.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jan 18 11:31:23 managed-node2 python3.12[32655]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1737217883.142967-15089-251893677799058/.source.yml dest=/etc/containers/systemd/envoy-proxy-configmap.yml owner=root group=0 mode=0644 _original_basename=envoy-proxy-configmap.yml follow=False checksum=d681c7d56f912150d041873e880818b22a90c188 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:31:24 managed-node2 python3.12[32786]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Jan 18 11:31:24 managed-node2 systemd[1]: Reload requested from client PID 32787 ('systemctl') (unit session-5.scope)...
Jan 18 11:31:24 managed-node2 systemd[1]: Reloading...
Jan 18 11:31:24 managed-node2 systemd-rc-local-generator[32836]: /etc/rc.d/rc.local is not marked executable, skipping.
Jan 18 11:31:24 managed-node2 systemd-ssh-generator[32839]: Failed to query local AF_VSOCK CID: Permission denied
Jan 18 11:31:24 managed-node2 (sd-exec-[32810]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1.
Jan 18 11:31:24 managed-node2 systemd[1]: Reloading finished in 205 ms.
Jan 18 11:31:25 managed-node2 python3.12[32975]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 18 11:31:26 managed-node2 python3.12[33108]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:31:27 managed-node2 python3.12[33239]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jan 18 11:31:27 managed-node2 python3.12[33344]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/systemd/quadlet-demo.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1737217887.039937-15252-234748873817093/.source.yml _original_basename=.9j201qqb follow=False checksum=998dccde0483b1654327a46ddd89cbaa47650370 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:31:28 managed-node2 python3.12[33475]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Jan 18 11:31:28 managed-node2 systemd[1]: Reload requested from client PID 33476 ('systemctl') (unit session-5.scope)...
Jan 18 11:31:28 managed-node2 systemd[1]: Reloading...
Jan 18 11:31:28 managed-node2 systemd-ssh-generator[33525]: Failed to query local AF_VSOCK CID: Permission denied
Jan 18 11:31:28 managed-node2 systemd-rc-local-generator[33523]: /etc/rc.d/rc.local is not marked executable, skipping.
Jan 18 11:31:28 managed-node2 (sd-exec-[33499]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1.
Jan 18 11:31:28 managed-node2 systemd[1]: Reloading finished in 210 ms.
Jan 18 11:31:29 managed-node2 python3.12[33665]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 18 11:31:30 managed-node2 python3.12[33798]: ansible-slurp Invoked with path=/etc/containers/systemd/quadlet-demo.yml src=/etc/containers/systemd/quadlet-demo.yml
Jan 18 11:31:30 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.
Jan 18 11:31:31 managed-node2 python3.12[33930]: ansible-file Invoked with path=/tmp/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:31:31 managed-node2 python3.12[34061]: ansible-file Invoked with path=/tmp/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:31:31 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jan 18 11:31:45 managed-node2 podman[34200]: 2025-01-18 11:31:45.223512241 -0500 EST m=+13.268197810 image pull fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b quay.io/linux-system-roles/wordpress:4.8-apache
Jan 18 11:31:45 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jan 18 11:31:45 managed-node2 systemd[4347]: Created slice background.slice - User Background Tasks Slice.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 14.
Jan 18 11:31:45 managed-node2 systemd[4347]: Starting systemd-tmpfiles-clean.service - Cleanup of User's Temporary Files and Directories...
░░ Subject: A start job for unit UNIT has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has begun execution.
░░
░░ The job identifier is 13.
Jan 18 11:31:45 managed-node2 systemd[4347]: Finished systemd-tmpfiles-clean.service - Cleanup of User's Temporary Files and Directories.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 13.
Jan 18 11:31:45 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
[the same deactivation notice repeats once more at 11:31:45]
Jan 18 11:31:49 managed-node2 podman[34625]: 2025-01-18 11:31:49.789273837 -0500 EST m=+4.055715037 image pull 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d quay.io/linux-system-roles/envoyproxy:v1.25.0
Jan 18 11:31:49 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
[the same deactivation notice repeats once more at 11:31:49]
Jan 18 11:31:50 managed-node2 python3.12[34888]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:31:50 managed-node2 python3.12[35019]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.kube follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jan 18 11:31:50 managed-node2 python3.12[35124]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1737217910.379617-15884-151214228727689/.source.kube dest=/etc/containers/systemd/quadlet-demo.kube owner=root group=0 mode=0644 _original_basename=quadlet-demo.kube follow=False checksum=7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:31:51 managed-node2 python3.12[35255]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Jan 18 11:31:51 managed-node2 systemd[1]: Reload requested from client PID 35256 ('systemctl') (unit session-5.scope)...
Jan 18 11:31:51 managed-node2 systemd[1]: Reloading...
Jan 18 11:31:51 managed-node2 systemd-rc-local-generator[35302]: /etc/rc.d/rc.local is not marked executable, skipping.
Jan 18 11:31:51 managed-node2 systemd-ssh-generator[35304]: Failed to query local AF_VSOCK CID: Permission denied
Jan 18 11:31:51 managed-node2 (sd-exec-[35278]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1.
Jan 18 11:31:51 managed-node2 systemd[1]: Reloading finished in 208 ms.
Jan 18 11:31:52 managed-node2 python3.12[35443]: ansible-systemd Invoked with name=quadlet-demo.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None
Jan 18 11:31:52 managed-node2 systemd[1]: Starting quadlet-demo-mysql.service...
░░ Subject: A start job for unit quadlet-demo-mysql.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit quadlet-demo-mysql.service has begun execution.
░░
░░ The job identifier is 2935.
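Note that starting quadlet-demo.service evidently pulls in quadlet-demo-mysql.service as a dependency (job 2935), which is why the failed MySQL unit starts again here. quadlet-demo.service itself is generated from quadlet-demo.kube (checksum 7a5c73a5... above); the file content is not in this log, so the sketch below is an assumption, with only the Yaml reference inferable from the quadlet-demo.yml installed earlier:

    cat /etc/containers/systemd/quadlet-demo.kube
    # [Kube]
    # Yaml=quadlet-demo.yml           # assumption: the Kubernetes YAML installed above
    # Network=quadlet-demo.network    # assumption: joins the demo network like the MySQL unit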
Jan 18 11:31:52 managed-node2 podman[35447]: 2025-01-18 11:31:52.454404162 -0500 EST m=+0.049698892 container create c9e0c0b61f5bf1f4e67dfcaa03feb6e7ede00a21a6836db8f69fee04a7766bec (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 18 11:31:52 managed-node2 NetworkManager[709]: [1737217912.4781] manager: (podman2): new Bridge device (/org/freedesktop/NetworkManager/Devices/5) Jan 18 11:31:52 managed-node2 kernel: podman2: port 1(veth0) entered blocking state Jan 18 11:31:52 managed-node2 kernel: podman2: port 1(veth0) entered disabled state Jan 18 11:31:52 managed-node2 kernel: veth0: entered allmulticast mode Jan 18 11:31:52 managed-node2 kernel: veth0: entered promiscuous mode Jan 18 11:31:52 managed-node2 kernel: podman2: port 1(veth0) entered blocking state Jan 18 11:31:52 managed-node2 kernel: podman2: port 1(veth0) entered forwarding state Jan 18 11:31:52 managed-node2 kernel: podman2: port 1(veth0) entered disabled state Jan 18 11:31:52 managed-node2 kernel: podman2: port 1(veth0) entered blocking state Jan 18 11:31:52 managed-node2 kernel: podman2: port 1(veth0) entered forwarding state Jan 18 11:31:52 managed-node2 NetworkManager[709]: [1737217912.4872] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/6) Jan 18 11:31:52 managed-node2 (udev-worker)[35457]: Network interface NamePolicy= disabled on kernel command line. Jan 18 11:31:52 managed-node2 (udev-worker)[35458]: Network interface NamePolicy= disabled on kernel command line. Jan 18 11:31:52 managed-node2 NetworkManager[709]: [1737217912.4933] device (veth0): carrier: link connected Jan 18 11:31:52 managed-node2 NetworkManager[709]: [1737217912.4942] device (podman2): carrier: link connected Jan 18 11:31:52 managed-node2 NetworkManager[709]: [1737217912.5102] device (podman2): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Jan 18 11:31:52 managed-node2 NetworkManager[709]: [1737217912.5118] device (podman2): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Jan 18 11:31:52 managed-node2 NetworkManager[709]: [1737217912.5134] device (podman2): Activation: starting connection 'podman2' (f4ffa46c-5a51-42dc-9f79-499ce208f1df) Jan 18 11:31:52 managed-node2 NetworkManager[709]: [1737217912.5137] device (podman2): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Jan 18 11:31:52 managed-node2 NetworkManager[709]: [1737217912.5140] device (podman2): state change: prepare -> config (reason 'none', managed-type: 'external') Jan 18 11:31:52 managed-node2 NetworkManager[709]: [1737217912.5142] device (podman2): state change: config -> ip-config (reason 'none', managed-type: 'external') Jan 18 11:31:52 managed-node2 NetworkManager[709]: [1737217912.5145] device (podman2): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Jan 18 11:31:52 managed-node2 podman[35447]: 2025-01-18 11:31:52.434260103 -0500 EST m=+0.029554934 image pull dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6 Jan 18 11:31:52 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. 
░░ ░░ The job identifier is 3021. Jan 18 11:31:52 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 3021. Jan 18 11:31:52 managed-node2 NetworkManager[709]: [1737217912.5626] device (podman2): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Jan 18 11:31:52 managed-node2 NetworkManager[709]: [1737217912.5629] device (podman2): state change: secondaries -> activated (reason 'none', managed-type: 'external') Jan 18 11:31:52 managed-node2 NetworkManager[709]: [1737217912.5636] device (podman2): Activation: successful, device activated. Jan 18 11:31:52 managed-node2 systemd[1]: Started run-p35491-i35791.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit run-p35491-i35791.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-p35491-i35791.scope has finished successfully. ░░ ░░ The job identifier is 3100. Jan 18 11:31:52 managed-node2 systemd[1]: Started c9e0c0b61f5bf1f4e67dfcaa03feb6e7ede00a21a6836db8f69fee04a7766bec-68378f64dc5bc1c8.timer - [systemd-run] /usr/bin/podman healthcheck run c9e0c0b61f5bf1f4e67dfcaa03feb6e7ede00a21a6836db8f69fee04a7766bec. ░░ Subject: A start job for unit c9e0c0b61f5bf1f4e67dfcaa03feb6e7ede00a21a6836db8f69fee04a7766bec-68378f64dc5bc1c8.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit c9e0c0b61f5bf1f4e67dfcaa03feb6e7ede00a21a6836db8f69fee04a7766bec-68378f64dc5bc1c8.timer has finished successfully. ░░ ░░ The job identifier is 3106. Jan 18 11:31:52 managed-node2 podman[35447]: 2025-01-18 11:31:52.654249735 -0500 EST m=+0.249544531 container init c9e0c0b61f5bf1f4e67dfcaa03feb6e7ede00a21a6836db8f69fee04a7766bec (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 18 11:31:52 managed-node2 systemd[1]: Started quadlet-demo-mysql.service. ░░ Subject: A start job for unit quadlet-demo-mysql.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-mysql.service has finished successfully. ░░ ░░ The job identifier is 2935. Jan 18 11:31:52 managed-node2 systemd[1]: Starting quadlet-demo.service... ░░ Subject: A start job for unit quadlet-demo.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo.service has begun execution. ░░ ░░ The job identifier is 2934. 
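The bridge podman2, the veth pair, and the aardvark-dns scope above are all created by podman when the kube unit's network comes up; NetworkManager only assumes the devices as external, which is why every state change carries managed-type 'external'. To correlate a bridge device like podman2 back to a podman network, one can list and inspect the networks (a diagnostic sketch; the inspect output names the bridge in its network_interface field):

- name: List podman networks   # diagnostic sketch
  ansible.builtin.command: podman network ls -n -q
  register: podman_nets
  changed_when: false

- name: Inspect each network to find its bridge interface
  ansible.builtin.command: podman network inspect {{ item }}
  loop: "{{ podman_nets.stdout_lines }}"
  changed_when: false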
Jan 18 11:31:52 managed-node2 podman[35447]: 2025-01-18 11:31:52.687513724 -0500 EST m=+0.282808666 container start c9e0c0b61f5bf1f4e67dfcaa03feb6e7ede00a21a6836db8f69fee04a7766bec (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 18 11:31:52 managed-node2 quadlet-demo-mysql[35447]: c9e0c0b61f5bf1f4e67dfcaa03feb6e7ede00a21a6836db8f69fee04a7766bec Jan 18 11:31:52 managed-node2 podman[35508]: 2025-01-18 11:31:52.721738515 -0500 EST m=+0.050046531 container died c9e0c0b61f5bf1f4e67dfcaa03feb6e7ede00a21a6836db8f69fee04a7766bec (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 18 11:31:52 managed-node2 quadlet-demo[35509]: Pods stopped: Jan 18 11:31:52 managed-node2 quadlet-demo[35509]: Pods removed: Jan 18 11:31:52 managed-node2 quadlet-demo[35509]: Secrets removed: Jan 18 11:31:52 managed-node2 quadlet-demo[35509]: Volumes removed: Jan 18 11:31:52 managed-node2 systemd[1]: c9e0c0b61f5bf1f4e67dfcaa03feb6e7ede00a21a6836db8f69fee04a7766bec-68378f64dc5bc1c8.timer: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit c9e0c0b61f5bf1f4e67dfcaa03feb6e7ede00a21a6836db8f69fee04a7766bec-68378f64dc5bc1c8.timer has successfully entered the 'dead' state. Jan 18 11:31:52 managed-node2 systemd[1]: Stopped c9e0c0b61f5bf1f4e67dfcaa03feb6e7ede00a21a6836db8f69fee04a7766bec-68378f64dc5bc1c8.timer - [systemd-run] /usr/bin/podman healthcheck run c9e0c0b61f5bf1f4e67dfcaa03feb6e7ede00a21a6836db8f69fee04a7766bec. ░░ Subject: A stop job for unit c9e0c0b61f5bf1f4e67dfcaa03feb6e7ede00a21a6836db8f69fee04a7766bec-68378f64dc5bc1c8.timer has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit c9e0c0b61f5bf1f4e67dfcaa03feb6e7ede00a21a6836db8f69fee04a7766bec-68378f64dc5bc1c8.timer has finished. ░░ ░░ The job identifier is 3262 and the job result is done. Jan 18 11:31:52 managed-node2 podman[35509]: 2025-01-18 11:31:52.762492215 -0500 EST m=+0.066794290 volume create wp-pv-claim Jan 18 11:31:52 managed-node2 systemd[1]: run-p35491-i35791.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-p35491-i35791.scope has successfully entered the 'dead' state. Jan 18 11:31:52 managed-node2 kernel: podman2: port 1(veth0) entered disabled state Jan 18 11:31:52 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Jan 18 11:31:52 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Jan 18 11:31:52 managed-node2 kernel: podman2: port 1(veth0) entered disabled state Jan 18 11:31:52 managed-node2 NetworkManager[709]: [1737217912.7929] device (podman2): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Jan 18 11:31:52 managed-node2 podman[35509]: 2025-01-18 11:31:52.796853397 -0500 EST m=+0.101155878 container create ebaa7123818d7b22f46a8d9a19742b43fccbb493eeaf6330c7a2021249095307 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 18 11:31:52 managed-node2 systemd[1]: run-netns-netns\x2de8f3d60e\x2d94aa\x2de59c\x2d25d5\x2d7ab49622e9c0.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2de8f3d60e\x2d94aa\x2de59c\x2d25d5\x2d7ab49622e9c0.mount has successfully entered the 'dead' state. Jan 18 11:31:52 managed-node2 podman[35509]: 2025-01-18 11:31:52.884198004 -0500 EST m=+0.188500074 volume create envoy-proxy-config Jan 18 11:31:52 managed-node2 podman[35509]: 2025-01-18 11:31:52.889315404 -0500 EST m=+0.193617500 volume create envoy-certificates Jan 18 11:31:52 managed-node2 systemd[1]: Created slice machine-libpod_pod_a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718.slice - cgroup machine-libpod_pod_a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718.slice. ░░ Subject: A start job for unit machine-libpod_pod_a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718.slice has finished successfully. ░░ ░░ The job identifier is 3264. Jan 18 11:31:52 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-c9e0c0b61f5bf1f4e67dfcaa03feb6e7ede00a21a6836db8f69fee04a7766bec-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-c9e0c0b61f5bf1f4e67dfcaa03feb6e7ede00a21a6836db8f69fee04a7766bec-userdata-shm.mount has successfully entered the 'dead' state. Jan 18 11:31:52 managed-node2 podman[35508]: 2025-01-18 11:31:52.946202817 -0500 EST m=+0.274510864 container remove c9e0c0b61f5bf1f4e67dfcaa03feb6e7ede00a21a6836db8f69fee04a7766bec (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 18 11:31:52 managed-node2 systemd[1]: quadlet-demo-mysql.service: Main process exited, code=exited, status=1/FAILURE ░░ Subject: Unit process exited ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ An ExecStart= process belonging to unit quadlet-demo-mysql.service has exited. ░░ ░░ The process' exit code is 'exited' and its exit status is 1. Jan 18 11:31:52 managed-node2 podman[35509]: 2025-01-18 11:31:52.968071448 -0500 EST m=+0.272373929 container create 31a0fad686b3f8a96df9bec1ce38218427a7b55b3827fbc3f11a6c6256bbaeb3 (image=localhost/podman-pause:5.3.1-1733097600, name=a217dea02776-infra, pod_id=a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Jan 18 11:31:52 managed-node2 podman[35509]: 2025-01-18 11:31:52.973088105 -0500 EST m=+0.277390350 pod create a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718 (image=, name=quadlet-demo) Jan 18 11:31:53 managed-node2 systemd[1]: quadlet-demo-mysql.service: Failed with result 'exit-code'. ░░ Subject: Unit failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit quadlet-demo-mysql.service has entered the 'failed' state with result 'exit-code'. 
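Note the ordering here: the mysql container died roughly 70 ms after it was started, its ExecStart exited with status 1, and the container was removed by the service's own cleanup, all while quadlet-demo.service was still assembling its pod. Everything that follows is fallout from that. A hedged diagnostic follow-up for this situation (by this point the container is already gone, so only the journal still has the story):

- name: Capture why the mysql quadlet service failed   # diagnostic sketch
  ansible.builtin.command: >-
    journalctl -u quadlet-demo-mysql.service -n 50 --no-pager
  register: mysql_journal
  changed_when: false

- name: Check for a leftover container (usually empty after cleanup)
  ansible.builtin.command: podman ps -a --filter name=quadlet-demo-mysql
  register: mysql_ctr
  changed_when: false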
Jan 18 11:31:53 managed-node2 podman[35509]: 2025-01-18 11:31:53.023105281 -0500 EST m=+0.327407444 container create 4b516baa98043afa93c6a79d4758790753b9566bfe5ea033225e996306507a8e (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 18 11:31:53 managed-node2 podman[35509]: 2025-01-18 11:31:52.983960024 -0500 EST m=+0.288262529 image pull fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b quay.io/linux-system-roles/wordpress:4.8-apache Jan 18 11:31:53 managed-node2 podman[35509]: 2025-01-18 11:31:53.027034276 -0500 EST m=+0.331336488 image pull 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d quay.io/linux-system-roles/envoyproxy:v1.25.0 Jan 18 11:31:53 managed-node2 podman[35509]: 2025-01-18 11:31:53.051903944 -0500 EST m=+0.356206095 container create 54ac665dc4b5db861fdaa07c016ad0171534108c182850b042590fb55742b41b (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 18 11:31:53 managed-node2 podman[35509]: 2025-01-18 11:31:53.052331434 -0500 EST m=+0.356633525 container restart ebaa7123818d7b22f46a8d9a19742b43fccbb493eeaf6330c7a2021249095307 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 18 11:31:53 managed-node2 systemd[1]: Started libpod-ebaa7123818d7b22f46a8d9a19742b43fccbb493eeaf6330c7a2021249095307.scope - libcrun container. ░░ Subject: A start job for unit libpod-ebaa7123818d7b22f46a8d9a19742b43fccbb493eeaf6330c7a2021249095307.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-ebaa7123818d7b22f46a8d9a19742b43fccbb493eeaf6330c7a2021249095307.scope has finished successfully. ░░ ░░ The job identifier is 3270. Jan 18 11:31:53 managed-node2 podman[35509]: 2025-01-18 11:31:53.17129751 -0500 EST m=+0.475599758 container init ebaa7123818d7b22f46a8d9a19742b43fccbb493eeaf6330c7a2021249095307 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 18 11:31:53 managed-node2 podman[35509]: 2025-01-18 11:31:53.174529033 -0500 EST m=+0.478831206 container start ebaa7123818d7b22f46a8d9a19742b43fccbb493eeaf6330c7a2021249095307 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 18 11:31:53 managed-node2 systemd[1]: libpod-ebaa7123818d7b22f46a8d9a19742b43fccbb493eeaf6330c7a2021249095307.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-ebaa7123818d7b22f46a8d9a19742b43fccbb493eeaf6330c7a2021249095307.scope has successfully entered the 'dead' state. 
Jan 18 11:31:53 managed-node2 kernel: podman2: port 1(veth0) entered blocking state Jan 18 11:31:53 managed-node2 kernel: podman2: port 1(veth0) entered disabled state Jan 18 11:31:53 managed-node2 kernel: veth0: entered allmulticast mode Jan 18 11:31:53 managed-node2 kernel: veth0: entered promiscuous mode Jan 18 11:31:53 managed-node2 kernel: podman2: port 1(veth0) entered blocking state Jan 18 11:31:53 managed-node2 kernel: podman2: port 1(veth0) entered forwarding state Jan 18 11:31:53 managed-node2 NetworkManager[709]: [1737217913.1975] manager: (podman2): new Bridge device (/org/freedesktop/NetworkManager/Devices/7) Jan 18 11:31:53 managed-node2 kernel: podman2: port 1(veth0) entered disabled state Jan 18 11:31:53 managed-node2 (udev-worker)[35479]: Network interface NamePolicy= disabled on kernel command line. Jan 18 11:31:53 managed-node2 kernel: podman2: port 1(veth0) entered blocking state Jan 18 11:31:53 managed-node2 kernel: podman2: port 1(veth0) entered forwarding state Jan 18 11:31:53 managed-node2 NetworkManager[709]: [1737217913.2045] device (podman2): carrier: link connected Jan 18 11:31:53 managed-node2 NetworkManager[709]: [1737217913.2064] device (veth0): carrier: link connected Jan 18 11:31:53 managed-node2 NetworkManager[709]: [1737217913.2083] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/8) Jan 18 11:31:53 managed-node2 NetworkManager[709]: [1737217913.2519] device (podman2): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Jan 18 11:31:53 managed-node2 NetworkManager[709]: [1737217913.2526] device (podman2): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Jan 18 11:31:53 managed-node2 NetworkManager[709]: [1737217913.2570] device (podman2): Activation: starting connection 'podman2' (ced5e191-5f1a-46ed-8291-387c8b116197) Jan 18 11:31:53 managed-node2 NetworkManager[709]: [1737217913.2573] device (podman2): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Jan 18 11:31:53 managed-node2 NetworkManager[709]: [1737217913.2592] device (podman2): state change: prepare -> config (reason 'none', managed-type: 'external') Jan 18 11:31:53 managed-node2 NetworkManager[709]: [1737217913.2595] device (podman2): state change: config -> ip-config (reason 'none', managed-type: 'external') Jan 18 11:31:53 managed-node2 NetworkManager[709]: [1737217913.2612] device (podman2): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Jan 18 11:31:53 managed-node2 NetworkManager[709]: [1737217913.2652] device (podman2): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Jan 18 11:31:53 managed-node2 NetworkManager[709]: [1737217913.2655] device (podman2): state change: secondaries -> activated (reason 'none', managed-type: 'external') Jan 18 11:31:53 managed-node2 NetworkManager[709]: [1737217913.2672] device (podman2): Activation: successful, device activated. Jan 18 11:31:53 managed-node2 podman[35561]: 2025-01-18 11:31:53.284091379 -0500 EST m=+0.088789796 container died ebaa7123818d7b22f46a8d9a19742b43fccbb493eeaf6330c7a2021249095307 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 18 11:31:53 managed-node2 systemd[1]: Started run-p35579-i35879.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run. 
░░ Subject: A start job for unit run-p35579-i35879.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-p35579-i35879.scope has finished successfully. ░░ ░░ The job identifier is 3276. Jan 18 11:31:53 managed-node2 systemd[1]: Started libpod-31a0fad686b3f8a96df9bec1ce38218427a7b55b3827fbc3f11a6c6256bbaeb3.scope - libcrun container. ░░ Subject: A start job for unit libpod-31a0fad686b3f8a96df9bec1ce38218427a7b55b3827fbc3f11a6c6256bbaeb3.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-31a0fad686b3f8a96df9bec1ce38218427a7b55b3827fbc3f11a6c6256bbaeb3.scope has finished successfully. ░░ ░░ The job identifier is 3282. Jan 18 11:31:53 managed-node2 systemd[1]: var-lib-containers-storage-overlay-f1dbbe6c0fb598c05b50f2c97c55a37b7cb307d3fa1b5af688b4e8201f175b72-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-f1dbbe6c0fb598c05b50f2c97c55a37b7cb307d3fa1b5af688b4e8201f175b72-merged.mount has successfully entered the 'dead' state. Jan 18 11:31:53 managed-node2 podman[35509]: 2025-01-18 11:31:53.863507177 -0500 EST m=+1.167809315 container init 31a0fad686b3f8a96df9bec1ce38218427a7b55b3827fbc3f11a6c6256bbaeb3 (image=localhost/podman-pause:5.3.1-1733097600, name=a217dea02776-infra, pod_id=a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Jan 18 11:31:53 managed-node2 podman[35509]: 2025-01-18 11:31:53.866449176 -0500 EST m=+1.170772112 container start 31a0fad686b3f8a96df9bec1ce38218427a7b55b3827fbc3f11a6c6256bbaeb3 (image=localhost/podman-pause:5.3.1-1733097600, name=a217dea02776-infra, pod_id=a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 18 11:31:53 managed-node2 systemd[1]: libpod-31a0fad686b3f8a96df9bec1ce38218427a7b55b3827fbc3f11a6c6256bbaeb3.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-31a0fad686b3f8a96df9bec1ce38218427a7b55b3827fbc3f11a6c6256bbaeb3.scope has successfully entered the 'dead' state. Jan 18 11:31:54 managed-node2 podman[35509]: 2025-01-18 11:31:54.442475962 -0500 EST m=+1.746778080 container died 31a0fad686b3f8a96df9bec1ce38218427a7b55b3827fbc3f11a6c6256bbaeb3 (image=localhost/podman-pause:5.3.1-1733097600, name=a217dea02776-infra, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 18 11:31:55 managed-node2 podman[35561]: 2025-01-18 11:31:55.673439255 -0500 EST m=+2.478137793 container cleanup ebaa7123818d7b22f46a8d9a19742b43fccbb493eeaf6330c7a2021249095307 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 18 11:31:55 managed-node2 kernel: podman2: port 1(veth0) entered disabled state Jan 18 11:31:55 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Jan 18 11:31:55 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Jan 18 11:31:55 managed-node2 kernel: podman2: port 1(veth0) entered disabled state Jan 18 11:31:55 managed-node2 systemd[1]: run-p35579-i35879.scope: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-p35579-i35879.scope has successfully entered the 'dead' state. Jan 18 11:31:55 managed-node2 NetworkManager[709]: [1737217915.7054] device (podman2): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Jan 18 11:31:55 managed-node2 systemd[1]: run-netns-netns\x2d8d85ee07\x2d973d\x2ddc50\x2d8011\x2d55a30378cf41.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2d8d85ee07\x2d973d\x2ddc50\x2d8011\x2d55a30378cf41.mount has successfully entered the 'dead' state. Jan 18 11:31:55 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-31a0fad686b3f8a96df9bec1ce38218427a7b55b3827fbc3f11a6c6256bbaeb3-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-31a0fad686b3f8a96df9bec1ce38218427a7b55b3827fbc3f11a6c6256bbaeb3-userdata-shm.mount has successfully entered the 'dead' state. Jan 18 11:31:55 managed-node2 systemd[1]: var-lib-containers-storage-overlay-cc18e647f3c0c280b4a52f2ebcc0bacc11186374de68b15e48d68c86119aa73d-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-cc18e647f3c0c280b4a52f2ebcc0bacc11186374de68b15e48d68c86119aa73d-merged.mount has successfully entered the 'dead' state. Jan 18 11:31:55 managed-node2 podman[35597]: 2025-01-18 11:31:55.773954339 -0500 EST m=+1.686521610 container cleanup 31a0fad686b3f8a96df9bec1ce38218427a7b55b3827fbc3f11a6c6256bbaeb3 (image=localhost/podman-pause:5.3.1-1733097600, name=a217dea02776-infra, pod_id=a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Jan 18 11:31:55 managed-node2 systemd[1]: var-lib-containers-storage-overlay-ee045950926b1e979554c028958a6f97be0fb1297c403876ed2a177339fcb7b8-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-ee045950926b1e979554c028958a6f97be0fb1297c403876ed2a177339fcb7b8-merged.mount has successfully entered the 'dead' state. Jan 18 11:31:55 managed-node2 quadlet-demo[35509]: [starting container 4b516baa98043afa93c6a79d4758790753b9566bfe5ea033225e996306507a8e: cannot get namespace path unless container 31a0fad686b3f8a96df9bec1ce38218427a7b55b3827fbc3f11a6c6256bbaeb3 is running: container is stopped] Jan 18 11:31:55 managed-node2 quadlet-demo[35509]: [starting container 4b516baa98043afa93c6a79d4758790753b9566bfe5ea033225e996306507a8e: cannot get namespace path unless container 31a0fad686b3f8a96df9bec1ce38218427a7b55b3827fbc3f11a6c6256bbaeb3 is running: container is stopped starting container 54ac665dc4b5db861fdaa07c016ad0171534108c182850b042590fb55742b41b: cannot get namespace path unless container 31a0fad686b3f8a96df9bec1ce38218427a7b55b3827fbc3f11a6c6256bbaeb3 is running: container is stopped] Jan 18 11:31:55 managed-node2 systemd[1]: quadlet-demo.service: Failed to parse MAINPID=0 field in notification message, ignoring: Numerical result out of range Jan 18 11:31:55 managed-node2 systemd[1]: Started quadlet-demo.service. 
░░ Subject: A start job for unit quadlet-demo.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo.service has finished successfully. ░░ ░░ The job identifier is 2934. Jan 18 11:31:55 managed-node2 quadlet-demo[35509]: Volumes: Jan 18 11:31:55 managed-node2 quadlet-demo[35509]: wp-pv-claim Jan 18 11:31:55 managed-node2 quadlet-demo[35509]: Pod: Jan 18 11:31:55 managed-node2 quadlet-demo[35509]: a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718 Jan 18 11:31:55 managed-node2 quadlet-demo[35509]: Containers: Jan 18 11:31:55 managed-node2 quadlet-demo[35509]: 4b516baa98043afa93c6a79d4758790753b9566bfe5ea033225e996306507a8e Jan 18 11:31:55 managed-node2 quadlet-demo[35509]: 54ac665dc4b5db861fdaa07c016ad0171534108c182850b042590fb55742b41b Jan 18 11:31:55 managed-node2 quadlet-demo[35509]: starting container 4b516baa98043afa93c6a79d4758790753b9566bfe5ea033225e996306507a8e: cannot get namespace path unless container 31a0fad686b3f8a96df9bec1ce38218427a7b55b3827fbc3f11a6c6256bbaeb3 is running: container is stopped Jan 18 11:31:55 managed-node2 quadlet-demo[35509]: starting container 54ac665dc4b5db861fdaa07c016ad0171534108c182850b042590fb55742b41b: cannot get namespace path unless container 31a0fad686b3f8a96df9bec1ce38218427a7b55b3827fbc3f11a6c6256bbaeb3 is running: container is stopped Jan 18 11:31:55 managed-node2 quadlet-demo[35509]: Error: failed to start 2 containers Jan 18 11:31:55 managed-node2 systemd[1]: quadlet-demo.service: Main process exited, code=exited, status=125/n/a ░░ Subject: Unit process exited ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ An ExecStart= process belonging to unit quadlet-demo.service has exited. ░░ ░░ The process' exit code is 'exited' and its exit status is 125. Jan 18 11:31:55 managed-node2 podman[35620]: 2025-01-18 11:31:55.939942255 -0500 EST m=+0.038666062 pod stop a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718 (image=, name=quadlet-demo) Jan 18 11:31:55 managed-node2 systemd[1]: Removed slice machine-libpod_pod_a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718.slice - cgroup machine-libpod_pod_a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718.slice. ░░ Subject: A stop job for unit machine-libpod_pod_a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718.slice has finished. ░░ ░░ The job identifier is 3289 and the job result is done. 
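Exit status 125 is podman's own error exit: the kube play behind quadlet-demo.service created the pod, but the two workload containers could not join it because the infra container had already stopped, so the unit flips to failed and the pod is torn down (the "Pods stopped" / "Pods removed" lines just below). Because systemd briefly reported the unit as started, a play that wants to catch this has to re-check the unit state; a sketch (the check itself is an assumption, not part of this test):

- name: Verify the quadlet service really came up   # sketch
  ansible.builtin.command: systemctl is-active quadlet-demo.service
  register: quadlet_state
  changed_when: false
  failed_when: quadlet_state.stdout != "active"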
Jan 18 11:31:55 managed-node2 podman[35620]: 2025-01-18 11:31:55.993017214 -0500 EST m=+0.091740928 container remove 4b516baa98043afa93c6a79d4758790753b9566bfe5ea033225e996306507a8e (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 18 11:31:56 managed-node2 podman[35620]: 2025-01-18 11:31:56.012314821 -0500 EST m=+0.111038538 container remove 54ac665dc4b5db861fdaa07c016ad0171534108c182850b042590fb55742b41b (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 18 11:31:56 managed-node2 podman[35620]: 2025-01-18 11:31:56.035462304 -0500 EST m=+0.134186123 container remove 31a0fad686b3f8a96df9bec1ce38218427a7b55b3827fbc3f11a6c6256bbaeb3 (image=localhost/podman-pause:5.3.1-1733097600, name=a217dea02776-infra, pod_id=a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Jan 18 11:31:56 managed-node2 podman[35620]: 2025-01-18 11:31:56.0440827 -0500 EST m=+0.142806389 pod remove a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718 (image=, name=quadlet-demo) Jan 18 11:31:56 managed-node2 podman[35620]: 2025-01-18 11:31:56.120942771 -0500 EST m=+0.219666484 container remove ebaa7123818d7b22f46a8d9a19742b43fccbb493eeaf6330c7a2021249095307 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 18 11:31:56 managed-node2 quadlet-demo[35620]: Pods stopped: Jan 18 11:31:56 managed-node2 quadlet-demo[35620]: a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718 Jan 18 11:31:56 managed-node2 quadlet-demo[35620]: Pods removed: Jan 18 11:31:56 managed-node2 quadlet-demo[35620]: a217dea0277624fd7200269408564d436617782e6d29092ca6d35c8b73388718 Jan 18 11:31:56 managed-node2 quadlet-demo[35620]: Secrets removed: Jan 18 11:31:56 managed-node2 quadlet-demo[35620]: Volumes removed: Jan 18 11:31:56 managed-node2 systemd[1]: quadlet-demo.service: Failed with result 'exit-code'. ░░ Subject: Unit failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit quadlet-demo.service has entered the 'failed' state with result 'exit-code'. Jan 18 11:31:56 managed-node2 python3.12[35760]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /etc/containers/systemd _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:31:56 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:31:56 managed-node2 python3.12[35892]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps -a _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:31:56 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:31:57 managed-node2 python3.12[36030]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:31:57 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:31:57 managed-node2 python3.12[36169]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod ps --ctr-ids --ctr-names --ctr-status _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:31:57 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:31:58 managed-node2 python3.12[36308]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail; systemctl list-units | grep quadlet _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:31:58 managed-node2 python3.12[36442]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:32:04 managed-node2 python3.12[36573]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:32:05 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. 
Jan 18 11:32:09 managed-node2 python3.12[36705]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:32:15 managed-node2 python3.12[36836]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:32:20 managed-node2 python3.12[36967]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:32:26 managed-node2 python3.12[37098]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:32:31 managed-node2 python3.12[37229]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:32:31 managed-node2 python3.12[37360]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:32:32 managed-node2 systemd[22916]: Starting grub-boot-success.service - Mark boot as successful... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 65. 
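The repeated ansible-get_url entries, a few seconds apart, are a single task retrying until the app answers on port 8000; here every attempt fails because quadlet-demo.service never came up. Reconstructed from the logged arguments (the retry count and delay are assumptions inferred from the timestamps):

- name: Wait for the demo endpoint to respond
  ansible.builtin.get_url:
    url: https://localhost:8000
    dest: /run/out
    mode: "0600"
    validate_certs: false
    timeout: 10
  register: web_out
  until: web_out is success
  retries: 6
  delay: 5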
Jan 18 11:32:32 managed-node2 python3.12[37492]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps -a _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:32:32 managed-node2 systemd[22916]: Finished grub-boot-success.service - Mark boot as successful. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 65. Jan 18 11:32:32 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:32:32 managed-node2 python3.12[37633]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod ps --ctr-ids --ctr-names --ctr-status _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:32:32 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:32:33 managed-node2 python3.12[37771]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail; systemctl list-units --all | grep quadlet _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:32:33 managed-node2 python3.12[37905]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /etc/systemd/system _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:32:35 managed-node2 python3.12[38168]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:32:36 managed-node2 python3.12[38305]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:32:38 managed-node2 python3.12[38438]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 18 11:32:39 managed-node2 python3.12[38570]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Jan 18 11:32:40 managed-node2 python3.12[38703]: 
ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 18 11:32:41 managed-node2 python3.12[38836]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 18 11:32:41 managed-node2 python3.12[38967]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 18 11:32:43 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:32:44 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:32:45 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:32:46 managed-node2 python3.12[39515]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:32:47 managed-node2 python3.12[39648]: ansible-systemd Invoked with name=quadlet-demo.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jan 18 11:32:47 managed-node2 systemd[1]: Reload requested from client PID 39651 ('systemctl') (unit session-5.scope)... Jan 18 11:32:47 managed-node2 systemd[1]: Reloading... Jan 18 11:32:47 managed-node2 systemd-rc-local-generator[39699]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 18 11:32:47 managed-node2 systemd-ssh-generator[39701]: Failed to query local AF_VSOCK CID: Permission denied Jan 18 11:32:47 managed-node2 (sd-exec-[39673]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 18 11:32:48 managed-node2 systemd[1]: Reloading finished in 206 ms. 
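The two firewall_lib invocations above open the demo's published ports, one port per call, which is consistent with the firewall role iterating over a list. Reconstructed as a role call (the task name is illustrative; the port list, state, and permanence are straight from the logged arguments):

- name: Open the demo's published ports
  ansible.builtin.include_role:
    name: fedora.linux_system_roles.firewall
  vars:
    firewall:
      - port: 8000/tcp
        state: enabled
        permanent: true
      - port: 9000/tcp
        state: enabled
        permanent: true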
Jan 18 11:32:48 managed-node2 python3.12[39838]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo.kube follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:32:49 managed-node2 python3.12[40102]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-demo.kube state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:32:50 managed-node2 python3.12[40233]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 18 11:32:50 managed-node2 systemd[1]: Reload requested from client PID 40234 ('systemctl') (unit session-5.scope)... Jan 18 11:32:50 managed-node2 systemd[1]: Reloading... Jan 18 11:32:50 managed-node2 systemd-ssh-generator[40284]: Failed to query local AF_VSOCK CID: Permission denied Jan 18 11:32:50 managed-node2 systemd-rc-local-generator[40282]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 18 11:32:50 managed-node2 (sd-exec-[40256]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 18 11:32:50 managed-node2 systemd[1]: Reloading finished in 200 ms. Jan 18 11:32:50 managed-node2 python3.12[40421]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:32:51 managed-node2 podman[40422]: 2025-01-18 11:32:50.996594067 -0500 EST m=+0.029204724 image untag 00e7ed281fae0a457d309c3c4887646b98c1226f1ff60de69ef58bf1e343c789 localhost/podman-pause:5.3.1-1733097600 Jan 18 11:32:51 managed-node2 podman[40422]: 2025-01-18 11:32:50.987938653 -0500 EST m=+0.020549520 image remove 00e7ed281fae0a457d309c3c4887646b98c1226f1ff60de69ef58bf1e343c789 Jan 18 11:32:51 managed-node2 podman[40422]: 2025-01-18 11:32:51.013161014 -0500 EST m=+0.045771685 image untag 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jan 18 11:32:51 managed-node2 podman[40422]: 2025-01-18 11:32:50.996602901 -0500 EST m=+0.029213536 image remove 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f Jan 18 11:32:51 managed-node2 podman[40422]: 2025-01-18 11:32:51.825350411 -0500 EST m=+0.857961054 image untag dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6 Jan 18 11:32:51 managed-node2 podman[40422]: 2025-01-18 11:32:51.013171107 -0500 EST m=+0.045781743 image remove dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 Jan 18 11:32:52 managed-node2 podman[40422]: 2025-01-18 11:32:52.557093232 -0500 EST m=+1.589703985 image untag fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b quay.io/linux-system-roles/wordpress:4.8-apache Jan 18 11:32:52 managed-node2 podman[40422]: 2025-01-18 11:32:51.825361272 -0500 EST m=+0.857971906 image remove fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b Jan 18 11:32:52 managed-node2 podman[40422]: 2025-01-18 11:32:52.688702547 -0500 EST m=+1.721313216 image untag 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d 
quay.io/linux-system-roles/envoyproxy:v1.25.0 Jan 18 11:32:52 managed-node2 podman[40422]: 2025-01-18 11:32:52.557130885 -0500 EST m=+1.589741620 image remove 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d Jan 18 11:32:52 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:32:53 managed-node2 python3.12[40561]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:32:53 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:32:53 managed-node2 python3.12[40700]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:32:53 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:32:54 managed-node2 python3.12[40839]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:32:54 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:32:54 managed-node2 python3.12[40977]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:32:54 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:32:54 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:32:55 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
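From 11:32:47 onward the run is in cleanup mode, and the same per-file pattern repeats for each quadlet spec (quadlet-demo.kube here, then quadlet-demo.yml and envoy-proxy-configmap.yml below): stop and disable the generated unit, delete the quadlet file, daemon-reload so the generator drops the unit, then prune images. Reconstructed from the logged invocations:

- name: Stop and disable the generated unit
  ansible.builtin.systemd:
    name: quadlet-demo.service
    state: stopped
    enabled: false
    force: true

- name: Remove the quadlet file
  ansible.builtin.file:
    path: /etc/containers/systemd/quadlet-demo.kube
    state: absent

- name: Re-run the quadlet generator so the unit disappears
  ansible.builtin.systemd:
    daemon_reload: true

- name: Prune now-unused images
  ansible.builtin.command: podman image prune --all -f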
Jan 18 11:32:56 managed-node2 python3.12[41392]: ansible-service_facts Invoked Jan 18 11:32:58 managed-node2 python3.12[41631]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:33:00 managed-node2 python3.12[41764]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:33:01 managed-node2 python3.12[42028]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-demo.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:33:02 managed-node2 python3.12[42159]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 18 11:33:02 managed-node2 systemd[1]: Reload requested from client PID 42160 ('systemctl') (unit session-5.scope)... Jan 18 11:33:02 managed-node2 systemd[1]: Reloading... Jan 18 11:33:02 managed-node2 systemd-rc-local-generator[42207]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 18 11:33:02 managed-node2 systemd-ssh-generator[42209]: Failed to query local AF_VSOCK CID: Permission denied Jan 18 11:33:02 managed-node2 (sd-exec-[42182]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 18 11:33:02 managed-node2 systemd[1]: Reloading finished in 201 ms. Jan 18 11:33:03 managed-node2 podman[42348]: 2025-01-18 11:33:03.010594631 -0500 EST m=+0.028228964 volume remove envoy-proxy-config Jan 18 11:33:03 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:33:03 managed-node2 podman[42486]: 2025-01-18 11:33:03.443019178 -0500 EST m=+0.024593264 volume remove envoy-certificates Jan 18 11:33:03 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:33:03 managed-node2 podman[42625]: 2025-01-18 11:33:03.878895119 -0500 EST m=+0.027689233 volume remove wp-pv-claim Jan 18 11:33:03 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:33:04 managed-node2 python3.12[42763]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:33:04 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:33:05 managed-node2 python3.12[42901]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:33:05 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:33:05 managed-node2 python3.12[43039]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:33:05 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:33:05 managed-node2 python3.12[43178]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:33:06 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:33:06 managed-node2 python3.12[43317]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:33:06 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:33:06 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:33:07 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
Jan 18 11:33:07 managed-node2 python3.12[43735]: ansible-service_facts Invoked
Jan 18 11:33:10 managed-node2 python3.12[43974]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 18 11:33:11 managed-node2 python3.12[44107]: ansible-stat Invoked with path=/etc/containers/systemd/envoy-proxy-configmap.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 18 11:33:13 managed-node2 python3.12[44371]: ansible-file Invoked with path=/etc/containers/systemd/envoy-proxy-configmap.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:33:13 managed-node2 python3.12[44502]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Jan 18 11:33:13 managed-node2 systemd[1]: Reload requested from client PID 44503 ('systemctl') (unit session-5.scope)...
Jan 18 11:33:13 managed-node2 systemd[1]: Reloading...
Jan 18 11:33:13 managed-node2 systemd-rc-local-generator[44553]: /etc/rc.d/rc.local is not marked executable, skipping.
Jan 18 11:33:13 managed-node2 systemd-ssh-generator[44555]: Failed to query local AF_VSOCK CID: Permission denied
Jan 18 11:33:13 managed-node2 (sd-exec-[44525]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1.
Jan 18 11:33:13 managed-node2 systemd[1]: Reloading finished in 200 ms.
Jan 18 11:33:13 managed-node2 systemd[1]: Starting fstrim.service - Discard unused blocks on filesystems from /etc/fstab...
░░ Subject: A start job for unit fstrim.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit fstrim.service has begun execution.
░░
░░ The job identifier is 3291.
Jan 18 11:33:14 managed-node2 systemd[1]: fstrim.service: Deactivated successfully.
Jan 18 11:33:14 managed-node2 systemd[1]: Finished fstrim.service - Discard unused blocks on filesystems from /etc/fstab.
░░ Subject: A start job for unit fstrim.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit fstrim.service has finished successfully.
░░
░░ The job identifier is 3291.
Jan 18 11:33:14 managed-node2 python3.12[44692]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:33:14 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Jan 18 11:33:15 managed-node2 python3.12[44830]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:33:15 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Jan 18 11:33:15 managed-node2 python3.12[44968]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:33:15 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Jan 18 11:33:16 managed-node2 python3.12[45106]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:33:16 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Jan 18 11:33:16 managed-node2 python3.12[45244]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:33:16 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Jan 18 11:33:17 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Jan 18 11:33:17 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Jan 18 11:33:17 managed-node2 python3.12[45661]: ansible-service_facts Invoked
Jan 18 11:33:20 managed-node2 python3.12[45900]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 18 11:33:22 managed-node2 python3.12[46033]: ansible-systemd Invoked with name=quadlet-demo-mysql.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None
Jan 18 11:33:22 managed-node2 systemd[1]: Reload requested from client PID 46036 ('systemctl') (unit session-5.scope)...
Jan 18 11:33:22 managed-node2 systemd[1]: Reloading...
Jan 18 11:33:22 managed-node2 systemd-rc-local-generator[46080]: /etc/rc.d/rc.local is not marked executable, skipping.
Jan 18 11:33:22 managed-node2 systemd-ssh-generator[46085]: Failed to query local AF_VSOCK CID: Permission denied
Jan 18 11:33:22 managed-node2 (sd-exec-[46058]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1.
Jan 18 11:33:22 managed-node2 systemd[1]: Reloading finished in 191 ms.
Jan 18 11:33:22 managed-node2 python3.12[46223]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 18 11:33:23 managed-node2 python3.12[46487]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.container state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:33:24 managed-node2 python3.12[46618]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Jan 18 11:33:24 managed-node2 systemd[1]: Reload requested from client PID 46619 ('systemctl') (unit session-5.scope)...
Jan 18 11:33:24 managed-node2 systemd[1]: Reloading...
Jan 18 11:33:24 managed-node2 systemd-rc-local-generator[46666]: /etc/rc.d/rc.local is not marked executable, skipping.
Jan 18 11:33:24 managed-node2 systemd-ssh-generator[46668]: Failed to query local AF_VSOCK CID: Permission denied
Jan 18 11:33:24 managed-node2 (sd-exec-[46641]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1.
Jan 18 11:33:24 managed-node2 systemd[1]: Reloading finished in 199 ms.
Jan 18 11:33:25 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Jan 18 11:33:25 managed-node2 python3.12[46943]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:33:25 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
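Editor's note: for a .container quadlet, the generated service is stopped and disabled before its source file is removed, as the ansible-systemd invocation for quadlet-demo-mysql.service above shows. A sketch using exactly the logged parameters (the task name is illustrative):

- name: Stop the quadlet-generated service before removing its source (illustrative)
  ansible.builtin.systemd:
    name: quadlet-demo-mysql.service
    scope: system
    state: stopped
    enabled: false
    force: true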
Jan 18 11:33:26 managed-node2 python3.12[47081]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:33:26 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Jan 18 11:33:27 managed-node2 python3.12[47218]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:33:27 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Jan 18 11:33:27 managed-node2 python3.12[47357]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:33:27 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Jan 18 11:33:28 managed-node2 python3.12[47495]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:33:28 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Jan 18 11:33:28 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Jan 18 11:33:29 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Jan 18 11:33:29 managed-node2 python3.12[47908]: ansible-service_facts Invoked
Jan 18 11:33:32 managed-node2 python3.12[48147]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 18 11:33:34 managed-node2 python3.12[48280]: ansible-systemd Invoked with name=quadlet-demo-mysql-volume.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None
Jan 18 11:33:34 managed-node2 systemd[1]: Reload requested from client PID 48283 ('systemctl') (unit session-5.scope)...
Jan 18 11:33:34 managed-node2 systemd[1]: Reloading...
Jan 18 11:33:34 managed-node2 systemd-rc-local-generator[48326]: /etc/rc.d/rc.local is not marked executable, skipping.
Jan 18 11:33:34 managed-node2 systemd-ssh-generator[48335]: Failed to query local AF_VSOCK CID: Permission denied
Jan 18 11:33:34 managed-node2 (sd-exec-[48305]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1.
Jan 18 11:33:35 managed-node2 systemd[1]: Reloading finished in 197 ms.
Jan 18 11:33:35 managed-node2 systemd[1]: quadlet-demo-mysql-volume.service: Deactivated successfully.
Jan 18 11:33:35 managed-node2 systemd[1]: Stopped quadlet-demo-mysql-volume.service.
░░ Subject: A stop job for unit quadlet-demo-mysql-volume.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit quadlet-demo-mysql-volume.service has finished.
░░
░░ The job identifier is 3369 and the job result is done.
Jan 18 11:33:35 managed-node2 python3.12[48472]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.volume follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 18 11:33:36 managed-node2 python3.12[48736]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.volume state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:33:37 managed-node2 python3.12[48867]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Jan 18 11:33:37 managed-node2 systemd[1]: Reload requested from client PID 48868 ('systemctl') (unit session-5.scope)...
Jan 18 11:33:37 managed-node2 systemd[1]: Reloading...
Jan 18 11:33:37 managed-node2 systemd-rc-local-generator[48918]: /etc/rc.d/rc.local is not marked executable, skipping.
Jan 18 11:33:37 managed-node2 systemd-ssh-generator[48921]: Failed to query local AF_VSOCK CID: Permission denied
Jan 18 11:33:37 managed-node2 (sd-exec-[48890]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1.
Jan 18 11:33:37 managed-node2 systemd[1]: Reloading finished in 196 ms.
Jan 18 11:33:38 managed-node2 podman[49057]: 2025-01-18 11:33:38.355511651 -0500 EST m=+0.027364466 volume remove systemd-quadlet-demo-mysql
Jan 18 11:33:38 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
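Editor's note: the podman "volume remove systemd-quadlet-demo-mysql" event above corresponds to the role's "Remove volumes" task (cleanup_quadlet_spec.yml:99, per the recap below). A hypothetical command-based equivalent; the role's real implementation may differ, and the register name plus the 'no such volume' error-string match are assumptions:

- name: Remove the leftover quadlet volume (illustrative)
  ansible.builtin.command: podman volume rm systemd-quadlet-demo-mysql
  register: volume_rm
  changed_when: volume_rm.rc == 0
  failed_when: volume_rm.rc != 0 and 'no such volume' not in volume_rm.stderr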
Jan 18 11:33:39 managed-node2 python3.12[49194]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:33:39 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Jan 18 11:33:39 managed-node2 python3.12[49331]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:33:39 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Jan 18 11:33:40 managed-node2 python3.12[49469]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:33:40 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Jan 18 11:33:40 managed-node2 python3.12[49608]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:33:40 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Jan 18 11:33:41 managed-node2 python3.12[49746]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:33:41 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Jan 18 11:33:41 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Jan 18 11:33:42 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Jan 18 11:33:42 managed-node2 python3.12[50161]: ansible-service_facts Invoked
Jan 18 11:33:45 managed-node2 python3.12[50400]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 18 11:33:46 managed-node2 python3.12[50533]: ansible-systemd Invoked with name=quadlet-demo-network.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None
Jan 18 11:33:46 managed-node2 systemd[1]: Reload requested from client PID 50536 ('systemctl') (unit session-5.scope)...
Jan 18 11:33:46 managed-node2 systemd[1]: Reloading...
Jan 18 11:33:47 managed-node2 systemd-rc-local-generator[50576]: /etc/rc.d/rc.local is not marked executable, skipping.
Jan 18 11:33:47 managed-node2 systemd-ssh-generator[50580]: Failed to query local AF_VSOCK CID: Permission denied
Jan 18 11:33:47 managed-node2 (sd-exec-[50558]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1.
Jan 18 11:33:47 managed-node2 systemd[1]: Reloading finished in 198 ms.
Jan 18 11:33:47 managed-node2 systemd[1]: quadlet-demo-network.service: Deactivated successfully.
Jan 18 11:33:47 managed-node2 systemd[1]: Stopped quadlet-demo-network.service.
░░ Subject: A stop job for unit quadlet-demo-network.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit quadlet-demo-network.service has finished.
░░
░░ The job identifier is 3370 and the job result is done.
Jan 18 11:33:47 managed-node2 python3.12[50725]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo.network follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 18 11:33:48 managed-node2 python3.12[50989]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-demo.network state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:33:49 managed-node2 python3.12[51120]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Jan 18 11:33:49 managed-node2 systemd[1]: Reload requested from client PID 51121 ('systemctl') (unit session-5.scope)...
Jan 18 11:33:49 managed-node2 systemd[1]: Reloading...
Jan 18 11:33:49 managed-node2 systemd-rc-local-generator[51169]: /etc/rc.d/rc.local is not marked executable, skipping.
Jan 18 11:33:49 managed-node2 systemd-ssh-generator[51171]: Failed to query local AF_VSOCK CID: Permission denied
Jan 18 11:33:49 managed-node2 (sd-exec-[51143]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1.
Jan 18 11:33:49 managed-node2 systemd[1]: Reloading finished in 194 ms.
Jan 18 11:33:49 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Jan 18 11:33:50 managed-node2 python3.12[51447]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:33:50 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Jan 18 11:33:50 managed-node2 python3.12[51585]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:33:50 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Jan 18 11:33:51 managed-node2 python3.12[51723]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:33:51 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Jan 18 11:33:51 managed-node2 python3.12[51861]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:33:51 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Jan 18 11:33:52 managed-node2 python3.12[51999]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:33:52 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Jan 18 11:33:52 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Jan 18 11:33:53 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Jan 18 11:33:53 managed-node2 python3.12[52413]: ansible-service_facts Invoked
Jan 18 11:33:56 managed-node2 python3.12[52652]: ansible-ansible.legacy.command Invoked with _raw_params=exec 1>&2 set -x set -o pipefail systemctl list-units --plain -l --all | grep quadlet || : systemctl list-unit-files --all | grep quadlet || : systemctl list-units --plain --failed -l --all | grep quadlet || : _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:33:56 managed-node2 python3.12[52790]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None

PLAY RECAP *********************************************************************
managed-node2              : ok=409  changed=47   unreachable=0    failed=2    skipped=442  rescued=2    ignored=0

TASKS RECAP ********************************************************************
Saturday 18 January 2025  11:33:56 -0500 (0:00:00.449)       0:03:29.043 ******
===============================================================================
Check web -------------------------------------------------------------- 33.27s
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:121
fedora.linux_system_roles.podman : Ensure container images are present -- 18.38s
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
fedora.linux_system_roles.podman : Ensure container images are present -- 10.40s
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
fedora.linux_system_roles.podman : Start service ------------------------ 4.08s
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
fedora.linux_system_roles.certificate : Ensure provider packages are installed --- 3.14s
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:23
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.36s
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.certificate : Ensure certificate role dependencies are installed --- 2.21s
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:5
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.16s
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : Prune images no longer in use -------- 2.12s
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.09s
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.05s
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.03s
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.00s
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : Remove volumes ----------------------- 1.35s
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99
Gathering Facts --------------------------------------------------------- 1.21s
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:9
fedora.linux_system_roles.certificate : Slurp the contents of the files --- 1.17s
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:152
fedora.linux_system_roles.firewall : Configure firewall ----------------- 1.12s
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71
fedora.linux_system_roles.certificate : Remove files -------------------- 1.11s
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:181
fedora.linux_system_roles.certificate : Ensure provider service is running --- 1.11s
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:90
fedora.linux_system_roles.firewall : Configure firewall ----------------- 1.06s
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71
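Editor's note: the "For testing and debugging - services" entries in the recap above correspond to the shell invocation journaled at 11:33:56 (python3.12[52652]). A minimal task sketch reconstructed from that _raw_params value; only the script body comes from the log, while the task wrapper and changed_when handling are illustrative:

- name: For testing and debugging - services (illustrative)
  ansible.builtin.shell: |
    exec 1>&2
    set -x
    set -o pipefail
    systemctl list-units --plain -l --all | grep quadlet || :
    systemctl list-unit-files --all | grep quadlet || :
    systemctl list-units --plain --failed -l --all | grep quadlet || :
  changed_when: false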